mirror of
https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-30 03:22:14 +00:00
Merge branch 'ClickHouse:master' into origin/67-replace-zookeeper-to-clickhouse-keeper-in-docs-and-tutorials
This commit is contained in:
commit
2ffdefe839
73
.clang-tidy
73
.clang-tidy
@ -1,3 +1,6 @@
|
|||||||
|
# Enable all checks + disale selected checks. Feel free to remove disabled checks from below list if
|
||||||
|
# a) the new check is not controversial (this includes many checks in readability-* and google-*) or
|
||||||
|
# b) too noisy (checks with > 100 new warnings are considered noisy, this includes e.g. cppcoreguidelines-*).
|
||||||
Checks: '*,
|
Checks: '*,
|
||||||
-abseil-*,
|
-abseil-*,
|
||||||
|
|
||||||
@ -5,38 +8,19 @@ Checks: '*,
|
|||||||
|
|
||||||
-android-*,
|
-android-*,
|
||||||
|
|
||||||
-bugprone-assert-side-effect,
|
|
||||||
-bugprone-branch-clone,
|
-bugprone-branch-clone,
|
||||||
-bugprone-dynamic-static-initializers,
|
|
||||||
-bugprone-easily-swappable-parameters,
|
-bugprone-easily-swappable-parameters,
|
||||||
-bugprone-exception-escape,
|
-bugprone-exception-escape,
|
||||||
-bugprone-forwarding-reference-overload,
|
|
||||||
-bugprone-implicit-widening-of-multiplication-result,
|
-bugprone-implicit-widening-of-multiplication-result,
|
||||||
-bugprone-lambda-function-name,
|
|
||||||
-bugprone-misplaced-widening-cast,
|
|
||||||
-bugprone-narrowing-conversions,
|
-bugprone-narrowing-conversions,
|
||||||
-bugprone-no-escape,
|
|
||||||
-bugprone-not-null-terminated-result,
|
-bugprone-not-null-terminated-result,
|
||||||
-bugprone-signal-handler,
|
|
||||||
-bugprone-spuriously-wake-up-functions,
|
|
||||||
-bugprone-suspicious-semicolon,
|
|
||||||
-bugprone-unhandled-exception-at-new,
|
|
||||||
-bugprone-unhandled-self-assignment,
|
|
||||||
|
|
||||||
-cert-dcl16-c,
|
-cert-dcl16-c,
|
||||||
-cert-dcl37-c,
|
|
||||||
-cert-dcl51-cpp,
|
|
||||||
-cert-dcl58-cpp,
|
|
||||||
-cert-err58-cpp,
|
-cert-err58-cpp,
|
||||||
-cert-err60-cpp,
|
|
||||||
-cert-msc32-c,
|
-cert-msc32-c,
|
||||||
-cert-msc51-cpp,
|
-cert-msc51-cpp,
|
||||||
-cert-oop54-cpp,
|
-cert-oop54-cpp,
|
||||||
-cert-oop57-cpp,
|
-cert-oop57-cpp,
|
||||||
-cert-oop58-cpp,
|
|
||||||
|
|
||||||
-clang-analyzer-core.DynamicTypePropagation,
|
|
||||||
-clang-analyzer-core.uninitialized.CapturedBlockVariable,
|
|
||||||
|
|
||||||
-clang-analyzer-optin.performance.Padding,
|
-clang-analyzer-optin.performance.Padding,
|
||||||
-clang-analyzer-optin.portability.UnixAPI,
|
-clang-analyzer-optin.portability.UnixAPI,
|
||||||
@ -44,7 +28,31 @@ Checks: '*,
|
|||||||
-clang-analyzer-security.insecureAPI.bzero,
|
-clang-analyzer-security.insecureAPI.bzero,
|
||||||
-clang-analyzer-security.insecureAPI.strcpy,
|
-clang-analyzer-security.insecureAPI.strcpy,
|
||||||
|
|
||||||
-cppcoreguidelines-*,
|
-cppcoreguidelines-avoid-c-arrays,
|
||||||
|
-cppcoreguidelines-avoid-goto,
|
||||||
|
-cppcoreguidelines-avoid-magic-numbers,
|
||||||
|
-cppcoreguidelines-avoid-non-const-global-variables,
|
||||||
|
-cppcoreguidelines-explicit-virtual-functions,
|
||||||
|
-cppcoreguidelines-init-variables,
|
||||||
|
-cppcoreguidelines-interfaces-global-init,
|
||||||
|
-cppcoreguidelines-macro-usage,
|
||||||
|
-cppcoreguidelines-narrowing-conversions,
|
||||||
|
-cppcoreguidelines-no-malloc,
|
||||||
|
-cppcoreguidelines-non-private-member-variables-in-classes,
|
||||||
|
-cppcoreguidelines-owning-memory,
|
||||||
|
-cppcoreguidelines-prefer-member-initializer,
|
||||||
|
-cppcoreguidelines-pro-bounds-array-to-pointer-decay,
|
||||||
|
-cppcoreguidelines-pro-bounds-constant-array-index,
|
||||||
|
-cppcoreguidelines-pro-bounds-pointer-arithmetic,
|
||||||
|
-cppcoreguidelines-pro-type-const-cast,
|
||||||
|
-cppcoreguidelines-pro-type-cstyle-cast,
|
||||||
|
-cppcoreguidelines-pro-type-member-init,
|
||||||
|
-cppcoreguidelines-pro-type-reinterpret-cast,
|
||||||
|
-cppcoreguidelines-pro-type-static-cast-downcast,
|
||||||
|
-cppcoreguidelines-pro-type-union-access,
|
||||||
|
-cppcoreguidelines-pro-type-vararg,
|
||||||
|
-cppcoreguidelines-slicing,
|
||||||
|
-cppcoreguidelines-special-member-functions,
|
||||||
|
|
||||||
-concurrency-mt-unsafe,
|
-concurrency-mt-unsafe,
|
||||||
|
|
||||||
@ -53,7 +61,6 @@ Checks: '*,
|
|||||||
-fuchsia-*,
|
-fuchsia-*,
|
||||||
|
|
||||||
-google-build-using-namespace,
|
-google-build-using-namespace,
|
||||||
-google-global-names-in-headers,
|
|
||||||
-google-readability-braces-around-statements,
|
-google-readability-braces-around-statements,
|
||||||
-google-readability-function-size,
|
-google-readability-function-size,
|
||||||
-google-readability-namespace-comments,
|
-google-readability-namespace-comments,
|
||||||
@ -63,10 +70,8 @@ Checks: '*,
|
|||||||
-hicpp-avoid-c-arrays,
|
-hicpp-avoid-c-arrays,
|
||||||
-hicpp-avoid-goto,
|
-hicpp-avoid-goto,
|
||||||
-hicpp-braces-around-statements,
|
-hicpp-braces-around-statements,
|
||||||
-hicpp-deprecated-headers,
|
|
||||||
-hicpp-explicit-conversions,
|
-hicpp-explicit-conversions,
|
||||||
-hicpp-function-size,
|
-hicpp-function-size,
|
||||||
-hicpp-invalid-access-moved,
|
|
||||||
-hicpp-member-init,
|
-hicpp-member-init,
|
||||||
-hicpp-move-const-arg,
|
-hicpp-move-const-arg,
|
||||||
-hicpp-multiway-paths-covered,
|
-hicpp-multiway-paths-covered,
|
||||||
@ -79,61 +84,45 @@ Checks: '*,
|
|||||||
-hicpp-uppercase-literal-suffix,
|
-hicpp-uppercase-literal-suffix,
|
||||||
-hicpp-use-auto,
|
-hicpp-use-auto,
|
||||||
-hicpp-use-emplace,
|
-hicpp-use-emplace,
|
||||||
-hicpp-use-equals-default,
|
|
||||||
-hicpp-use-noexcept,
|
|
||||||
-hicpp-use-override,
|
|
||||||
-hicpp-vararg,
|
-hicpp-vararg,
|
||||||
|
|
||||||
|
-linuxkernel-*,
|
||||||
|
|
||||||
-llvm-*,
|
-llvm-*,
|
||||||
|
|
||||||
-llvmlibc-*,
|
-llvmlibc-*,
|
||||||
|
|
||||||
-openmp-*,
|
-openmp-*,
|
||||||
|
|
||||||
-misc-definitions-in-headers,
|
|
||||||
-misc-new-delete-overloads,
|
|
||||||
-misc-no-recursion,
|
-misc-no-recursion,
|
||||||
-misc-non-copyable-objects,
|
|
||||||
-misc-non-private-member-variables-in-classes,
|
-misc-non-private-member-variables-in-classes,
|
||||||
-misc-static-assert,
|
|
||||||
|
|
||||||
-modernize-avoid-c-arrays,
|
-modernize-avoid-c-arrays,
|
||||||
-modernize-concat-nested-namespaces,
|
-modernize-concat-nested-namespaces,
|
||||||
-modernize-deprecated-headers,
|
|
||||||
-modernize-deprecated-ios-base-aliases,
|
|
||||||
-modernize-pass-by-value,
|
-modernize-pass-by-value,
|
||||||
-modernize-replace-auto-ptr,
|
|
||||||
-modernize-replace-disallow-copy-and-assign-macro,
|
|
||||||
-modernize-return-braced-init-list,
|
-modernize-return-braced-init-list,
|
||||||
-modernize-unary-static-assert,
|
|
||||||
-modernize-use-auto,
|
-modernize-use-auto,
|
||||||
-modernize-use-default-member-init,
|
-modernize-use-default-member-init,
|
||||||
-modernize-use-emplace,
|
-modernize-use-emplace,
|
||||||
-modernize-use-equals-default,
|
|
||||||
-modernize-use-nodiscard,
|
-modernize-use-nodiscard,
|
||||||
-modernize-use-noexcept,
|
|
||||||
-modernize-use-override,
|
-modernize-use-override,
|
||||||
-modernize-use-trailing-return-type,
|
-modernize-use-trailing-return-type,
|
||||||
|
|
||||||
-performance-inefficient-string-concatenation,
|
-performance-inefficient-string-concatenation,
|
||||||
-performance-no-int-to-ptr,
|
-performance-no-int-to-ptr,
|
||||||
-performance-type-promotion-in-math-fn,
|
|
||||||
-performance-trivially-destructible,
|
|
||||||
-performance-unnecessary-value-param,
|
-performance-unnecessary-value-param,
|
||||||
|
|
||||||
-portability-simd-intrinsics,
|
-portability-simd-intrinsics,
|
||||||
|
|
||||||
-readability-convert-member-functions-to-static,
|
|
||||||
-readability-braces-around-statements,
|
-readability-braces-around-statements,
|
||||||
-readability-else-after-return,
|
-readability-else-after-return,
|
||||||
-readability-function-cognitive-complexity,
|
-readability-function-cognitive-complexity,
|
||||||
-readability-function-size,
|
-readability-function-size,
|
||||||
|
-readability-identifier-length,
|
||||||
-readability-implicit-bool-conversion,
|
-readability-implicit-bool-conversion,
|
||||||
-readability-isolate-declaration,
|
-readability-isolate-declaration,
|
||||||
-readability-magic-numbers,
|
-readability-magic-numbers,
|
||||||
-readability-misleading-indentation,
|
|
||||||
-readability-named-parameter,
|
-readability-named-parameter,
|
||||||
-readability-qualified-auto,
|
|
||||||
-readability-redundant-declaration,
|
-readability-redundant-declaration,
|
||||||
-readability-static-accessed-through-instance,
|
-readability-static-accessed-through-instance,
|
||||||
-readability-suspicious-call-argument,
|
-readability-suspicious-call-argument,
|
||||||
|
6
.github/PULL_REQUEST_TEMPLATE.md
vendored
6
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -1,3 +1,9 @@
|
|||||||
|
<!---
|
||||||
|
A technical comment, you are free to remove or leave it as it is when PR is created
|
||||||
|
The following categories are used in the next scripts, update them accordingly
|
||||||
|
utils/changelog/changelog.py
|
||||||
|
tests/ci/run_check.py
|
||||||
|
-->
|
||||||
### Changelog category (leave one):
|
### Changelog category (leave one):
|
||||||
- New Feature
|
- New Feature
|
||||||
- Improvement
|
- Improvement
|
||||||
|
6
.github/workflows/backport.yml
vendored
6
.github/workflows/backport.yml
vendored
@ -38,6 +38,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
|
81
.github/workflows/backport_branches.yml
vendored
81
.github/workflows/backport_branches.yml
vendored
@ -112,8 +112,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#########################################################################################
|
#########################################################################################
|
||||||
#################################### ORDINARY BUILDS ####################################
|
#################################### ORDINARY BUILDS ####################################
|
||||||
@ -129,7 +131,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_release
|
BUILD_NAME=package_release
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -149,7 +150,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -159,8 +160,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebAarch64:
|
BuilderDebAarch64:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -173,7 +176,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_aarch64
|
BUILD_NAME=package_aarch64
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -193,7 +195,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -203,8 +205,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebAsan:
|
BuilderDebAsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -217,7 +221,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_asan
|
BUILD_NAME=package_asan
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -237,7 +240,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -247,8 +250,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebTsan:
|
BuilderDebTsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -261,7 +266,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_tsan
|
BUILD_NAME=package_tsan
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -281,7 +285,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -291,8 +295,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebDebug:
|
BuilderDebDebug:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -305,7 +311,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_debug
|
BUILD_NAME=package_debug
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -325,7 +330,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -335,8 +340,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
############################################################################################
|
############################################################################################
|
||||||
##################################### BUILD REPORTER #######################################
|
##################################### BUILD REPORTER #######################################
|
||||||
@ -380,8 +387,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
########################### FUNCTIONAl STATELESS TESTS #######################################
|
########################### FUNCTIONAl STATELESS TESTS #######################################
|
||||||
@ -418,8 +427,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
############################ FUNCTIONAl STATEFUL TESTS #######################################
|
############################ FUNCTIONAl STATEFUL TESTS #######################################
|
||||||
@ -456,8 +467,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
######################################### STRESS TESTS #######################################
|
######################################### STRESS TESTS #######################################
|
||||||
@ -497,8 +510,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
############################# INTEGRATION TESTS #############################################
|
############################# INTEGRATION TESTS #############################################
|
||||||
@ -534,8 +549,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FinishCheck:
|
FinishCheck:
|
||||||
needs:
|
needs:
|
||||||
|
12
.github/workflows/docs_check.yml
vendored
12
.github/workflows/docs_check.yml
vendored
@ -122,8 +122,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
DocsCheck:
|
DocsCheck:
|
||||||
needs: DockerHubPush
|
needs: DockerHubPush
|
||||||
@ -153,8 +155,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FinishCheck:
|
FinishCheck:
|
||||||
needs:
|
needs:
|
||||||
|
17
.github/workflows/docs_release.yml
vendored
17
.github/workflows/docs_release.yml
vendored
@ -7,16 +7,17 @@ env:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: master-release
|
group: master-release
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
on: # yamllint disable-line rule:truthy
|
'on':
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
paths:
|
paths:
|
||||||
- 'docs/**'
|
|
||||||
- 'website/**'
|
|
||||||
- 'benchmark/**'
|
|
||||||
- 'docker/**'
|
|
||||||
- '.github/**'
|
- '.github/**'
|
||||||
|
- 'benchmark/**'
|
||||||
|
- 'docker/docs/release/**'
|
||||||
|
- 'docs/**'
|
||||||
|
- 'utils/list-versions/version_date.tsv'
|
||||||
|
- 'website/**'
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
jobs:
|
jobs:
|
||||||
DockerHubPushAarch64:
|
DockerHubPushAarch64:
|
||||||
@ -116,6 +117,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
|
6
.github/workflows/jepsen.yml
vendored
6
.github/workflows/jepsen.yml
vendored
@ -39,6 +39,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
|
558
.github/workflows/master.yml
vendored
558
.github/workflows/master.yml
vendored
File diff suppressed because it is too large
Load Diff
6
.github/workflows/nightly.yml
vendored
6
.github/workflows/nightly.yml
vendored
@ -118,6 +118,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
|
588
.github/workflows/pull_request.yml
vendored
588
.github/workflows/pull_request.yml
vendored
File diff suppressed because it is too large
Load Diff
6
.github/workflows/release.yml
vendored
6
.github/workflows/release.yml
vendored
@ -58,6 +58,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
|
285
.github/workflows/release_branches.yml
vendored
285
.github/workflows/release_branches.yml
vendored
@ -103,8 +103,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#########################################################################################
|
#########################################################################################
|
||||||
#################################### ORDINARY BUILDS ####################################
|
#################################### ORDINARY BUILDS ####################################
|
||||||
@ -120,7 +122,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_release
|
BUILD_NAME=package_release
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -142,7 +143,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -152,8 +153,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebAarch64:
|
BuilderDebAarch64:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -166,7 +169,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_aarch64
|
BUILD_NAME=package_aarch64
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -185,7 +187,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
@ -194,8 +196,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebAsan:
|
BuilderDebAsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -208,7 +212,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_asan
|
BUILD_NAME=package_asan
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -228,7 +231,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -238,8 +241,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebUBsan:
|
BuilderDebUBsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -252,7 +257,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_ubsan
|
BUILD_NAME=package_ubsan
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -272,7 +276,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -282,8 +286,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebTsan:
|
BuilderDebTsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -296,7 +302,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_tsan
|
BUILD_NAME=package_tsan
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -316,7 +321,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -326,8 +331,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebMsan:
|
BuilderDebMsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -340,7 +347,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_msan
|
BUILD_NAME=package_msan
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -360,7 +366,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -370,8 +376,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebDebug:
|
BuilderDebDebug:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -384,7 +392,6 @@ jobs:
|
|||||||
IMAGES_PATH=${{runner.temp}}/images_path
|
IMAGES_PATH=${{runner.temp}}/images_path
|
||||||
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
|
||||||
CACHES_PATH=${{runner.temp}}/../ccaches
|
CACHES_PATH=${{runner.temp}}/../ccaches
|
||||||
CHECK_NAME=ClickHouse build check (actions)
|
|
||||||
BUILD_NAME=package_debug
|
BUILD_NAME=package_debug
|
||||||
EOF
|
EOF
|
||||||
- name: Download changed images
|
- name: Download changed images
|
||||||
@ -404,7 +411,7 @@ jobs:
|
|||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
mkdir -p "$TEMP_PATH"
|
mkdir -p "$TEMP_PATH"
|
||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
|
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
|
||||||
- name: Upload build URLs to artifacts
|
- name: Upload build URLs to artifacts
|
||||||
if: ${{ success() || failure() }}
|
if: ${{ success() || failure() }}
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v2
|
||||||
@ -414,8 +421,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
############################################################################################
|
############################################################################################
|
||||||
##################################### BUILD REPORTER #######################################
|
##################################### BUILD REPORTER #######################################
|
||||||
@ -462,8 +471,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
########################### FUNCTIONAl STATELESS TESTS #######################################
|
########################### FUNCTIONAl STATELESS TESTS #######################################
|
||||||
@ -500,8 +511,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestAarch64:
|
FunctionalStatelessTestAarch64:
|
||||||
needs: [BuilderDebAarch64]
|
needs: [BuilderDebAarch64]
|
||||||
@ -535,8 +548,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestAsan0:
|
FunctionalStatelessTestAsan0:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -572,8 +587,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestAsan1:
|
FunctionalStatelessTestAsan1:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -609,8 +626,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestTsan0:
|
FunctionalStatelessTestTsan0:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -646,8 +665,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestTsan1:
|
FunctionalStatelessTestTsan1:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -683,8 +704,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestTsan2:
|
FunctionalStatelessTestTsan2:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -720,8 +743,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestUBsan:
|
FunctionalStatelessTestUBsan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -755,8 +780,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestMsan0:
|
FunctionalStatelessTestMsan0:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -792,8 +819,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestMsan1:
|
FunctionalStatelessTestMsan1:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -829,8 +858,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestMsan2:
|
FunctionalStatelessTestMsan2:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -866,8 +897,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestDebug0:
|
FunctionalStatelessTestDebug0:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -903,8 +936,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestDebug1:
|
FunctionalStatelessTestDebug1:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -940,8 +975,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestDebug2:
|
FunctionalStatelessTestDebug2:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -977,8 +1014,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
############################ FUNCTIONAl STATEFUL TESTS #######################################
|
############################ FUNCTIONAl STATEFUL TESTS #######################################
|
||||||
@ -1015,8 +1054,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestAarch64:
|
FunctionalStatefulTestAarch64:
|
||||||
needs: [BuilderDebAarch64]
|
needs: [BuilderDebAarch64]
|
||||||
@ -1050,8 +1091,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestAsan:
|
FunctionalStatefulTestAsan:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -1085,8 +1128,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestTsan:
|
FunctionalStatefulTestTsan:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1120,8 +1165,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestMsan:
|
FunctionalStatefulTestMsan:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -1155,8 +1202,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestUBsan:
|
FunctionalStatefulTestUBsan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -1190,8 +1239,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestDebug:
|
FunctionalStatefulTestDebug:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -1225,8 +1276,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
######################################### STRESS TESTS #######################################
|
######################################### STRESS TESTS #######################################
|
||||||
@ -1262,8 +1315,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestTsan:
|
StressTestTsan:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1300,8 +1355,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestMsan:
|
StressTestMsan:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -1334,8 +1391,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestUBsan:
|
StressTestUBsan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -1368,8 +1427,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestDebug:
|
StressTestDebug:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -1402,8 +1463,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
############################# INTEGRATION TESTS #############################################
|
############################# INTEGRATION TESTS #############################################
|
||||||
@ -1441,8 +1504,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsAsan1:
|
IntegrationTestsAsan1:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -1477,8 +1542,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsAsan2:
|
IntegrationTestsAsan2:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -1513,8 +1580,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan0:
|
IntegrationTestsTsan0:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1549,8 +1618,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan1:
|
IntegrationTestsTsan1:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1585,8 +1656,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan2:
|
IntegrationTestsTsan2:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1621,8 +1694,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan3:
|
IntegrationTestsTsan3:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1657,8 +1732,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsRelease0:
|
IntegrationTestsRelease0:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
@ -1693,8 +1770,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsRelease1:
|
IntegrationTestsRelease1:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
@ -1729,8 +1808,10 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FinishCheck:
|
FinishCheck:
|
||||||
needs:
|
needs:
|
||||||
|
2
.github/workflows/tags_stable.yml
vendored
2
.github/workflows/tags_stable.yml
vendored
@ -32,7 +32,7 @@ jobs:
|
|||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
|
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
|
||||||
GID=$(id -d "${UID}")
|
GID=$(id -g "${UID}")
|
||||||
docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \
|
docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \
|
||||||
--volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \
|
--volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \
|
||||||
/ClickHouse/utils/changelog/changelog.py -vv --gh-user-or-token="$GITHUB_TOKEN" \
|
/ClickHouse/utils/changelog/changelog.py -vv --gh-user-or-token="$GITHUB_TOKEN" \
|
||||||
|
6
.github/workflows/woboq.yml
vendored
6
.github/workflows/woboq.yml
vendored
@ -37,6 +37,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
docker kill "$(docker ps -q)" ||:
|
# shellcheck disable=SC2046
|
||||||
docker rm -f "$(docker ps -a -q)" ||:
|
docker kill $(docker ps -q) ||:
|
||||||
|
# shellcheck disable=SC2046
|
||||||
|
docker rm -f $(docker ps -a -q) ||:
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
|
7
.gitmodules
vendored
7
.gitmodules
vendored
@ -265,3 +265,10 @@
|
|||||||
[submodule "contrib/wyhash"]
|
[submodule "contrib/wyhash"]
|
||||||
path = contrib/wyhash
|
path = contrib/wyhash
|
||||||
url = https://github.com/wangyi-fudan/wyhash.git
|
url = https://github.com/wangyi-fudan/wyhash.git
|
||||||
|
[submodule "contrib/eigen"]
|
||||||
|
path = contrib/eigen
|
||||||
|
url = https://github.com/eigen-mirror/eigen
|
||||||
|
[submodule "contrib/hashidsxx"]
|
||||||
|
path = contrib/hashidsxx
|
||||||
|
url = https://github.com/schoentoon/hashidsxx.git
|
||||||
|
|
||||||
|
165
CHANGELOG.md
165
CHANGELOG.md
@ -1,11 +1,174 @@
|
|||||||
### Table of Contents
|
### Table of Contents
|
||||||
|
**[ClickHouse release v22.5, 2022-05-19](#225)**<br>
|
||||||
**[ClickHouse release v22.4, 2022-04-20](#224)**<br>
|
**[ClickHouse release v22.4, 2022-04-20](#224)**<br>
|
||||||
**[ClickHouse release v22.3-lts, 2022-03-17](#223)**<br>
|
**[ClickHouse release v22.3-lts, 2022-03-17](#223)**<br>
|
||||||
**[ClickHouse release v22.2, 2022-02-17](#222)**<br>
|
**[ClickHouse release v22.2, 2022-02-17](#222)**<br>
|
||||||
**[ClickHouse release v22.1, 2022-01-18](#221)**<br>
|
**[ClickHouse release v22.1, 2022-01-18](#221)**<br>
|
||||||
**[Changelog for 2021](https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/whats-new/changelog/2021.md)**<br>
|
**[Changelog for 2021](https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/whats-new/changelog/2021.md)**<br>
|
||||||
|
|
||||||
### <a id="224"></a> ClickHouse release master FIXME as compared to v22.3.3.44-lts
|
### <a id="225"></a> ClickHouse release 22.5, 2022-05-19
|
||||||
|
|
||||||
|
#### Upgrade Notes
|
||||||
|
|
||||||
|
* Now, background merges, mutations and `OPTIMIZE` will not increment `SelectedRows` and `SelectedBytes` metrics. They (still) will increment `MergedRows` and `MergedUncompressedBytes` as it was before. This only affects the metric values, and makes them better. This change does not introduce any incompatibility, but you may wonder about the changes of metrics, so we put in this category. [#37040](https://github.com/ClickHouse/ClickHouse/pull/37040) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
* Updated the BoringSSL module to the official FIPS compliant version. This makes ClickHouse FIPS compliant. [#35914](https://github.com/ClickHouse/ClickHouse/pull/35914) ([Meena-Renganathan](https://github.com/Meena-Renganathan)). The ciphers `aes-192-cfb128` and `aes-256-cfb128` were removed, because they are not included in the FIPS certified version of BoringSSL.
|
||||||
|
* `max_memory_usage` setting is removed from the default user profile in `users.xml`. This enables flexible memory limits for queries instead of the old rigid limit of 10 GB.
|
||||||
|
* Disable `log_query_threads` setting by default. It controls the logging of statistics about every thread participating in query execution. After supporting asynchronous reads, the total number of distinct thread ids became too large, and logging into the `query_thread_log` has become too heavy. [#37077](https://github.com/ClickHouse/ClickHouse/pull/37077) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Remove function `groupArraySorted` which has a bug. [#36822](https://github.com/ClickHouse/ClickHouse/pull/36822) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
||||||
|
#### New Feature
|
||||||
|
|
||||||
|
* Enable memory overcommit by default. [#35921](https://github.com/ClickHouse/ClickHouse/pull/35921) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* Add support of GROUPING SETS in GROUP BY clause. This implementation supports a parallel processing of grouping sets. [#33631](https://github.com/ClickHouse/ClickHouse/pull/33631) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* Added `system.certificates` table. [#37142](https://github.com/ClickHouse/ClickHouse/pull/37142) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
|
* Adds `h3Line`, `h3Distance` and `h3HexRing` functions. [#37030](https://github.com/ClickHouse/ClickHouse/pull/37030) ([Bharat Nallan](https://github.com/bharatnc)).
|
||||||
|
* New single binary based diagnostics tool (clickhouse-diagnostics). [#36705](https://github.com/ClickHouse/ClickHouse/pull/36705) ([Dale McDiarmid](https://github.com/gingerwizard)).
|
||||||
|
* Add output format `Prometheus` [#36051](https://github.com/ClickHouse/ClickHouse/issues/36051). [#36206](https://github.com/ClickHouse/ClickHouse/pull/36206) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Add `MySQLDump` input format. It reads all data from INSERT queries belonging to one table in dump. If there are more than one table, by default it reads data from the first one. [#36667](https://github.com/ClickHouse/ClickHouse/pull/36667) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Show the `total_rows` and `total_bytes` fields in `system.tables` for temporary tables. [#36401](https://github.com/ClickHouse/ClickHouse/issues/36401). [#36439](https://github.com/ClickHouse/ClickHouse/pull/36439) ([xiedeyantu](https://github.com/xiedeyantu)).
|
||||||
|
* Allow to override `parts_to_delay_insert` and `parts_to_throw_insert` with query-level settings. If they are defined, they will override table-level settings. [#36371](https://github.com/ClickHouse/ClickHouse/pull/36371) ([Memo](https://github.com/Joeywzr)).
|
||||||
|
|
||||||
|
#### Experimental Feature
|
||||||
|
|
||||||
|
* Implemented L1, L2, Linf, Cosine distance functions for arrays and L1, L2, Linf norm functions for arrays.
|
||||||
|
[#37033](https://github.com/ClickHouse/ClickHouse/pull/37033) ([qieqieplus](https://github.com/qieqieplus)). Caveat: the functions will be renamed.
|
||||||
|
* Improve the `WATCH` query in WindowView: 1. Reduce the latency of providing query results by calling the `fire_condition` signal. 2. Makes the cancel query operation(ctrl-c) faster, by checking `isCancelled()` more frequently. [#37226](https://github.com/ClickHouse/ClickHouse/pull/37226) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Introspection for remove filesystem cache. [#36802](https://github.com/ClickHouse/ClickHouse/pull/36802) ([Han Shukai](https://github.com/KinderRiven)).
|
||||||
|
* Added new hash function `wyHash64` for SQL. [#36467](https://github.com/ClickHouse/ClickHouse/pull/36467) ([olevino](https://github.com/olevino)).
|
||||||
|
* Improvement for replicated databases: Added `SYSTEM SYNC DATABASE REPLICA` query which allows to sync tables metadata inside Replicated database, because currently synchronisation is asynchronous. [#35944](https://github.com/ClickHouse/ClickHouse/pull/35944) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Improvement for remote filesystem cache: Better read from cache. [#37054](https://github.com/ClickHouse/ClickHouse/pull/37054) ([Kseniia Sumarokova](https://github.com/kssenii)). Improve `SYSTEM DROP FILESYSTEM CACHE` query: `<path>` option and `FORCE` option. [#36639](https://github.com/ClickHouse/ClickHouse/pull/36639) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Improvement for semistructured data: Allow to cast columns of type `Object(...)` to `Object(Nullable(...))`. [#36564](https://github.com/ClickHouse/ClickHouse/pull/36564) ([awakeljw](https://github.com/awakeljw)).
|
||||||
|
* Improvement for parallel replicas: We create a local interpreter if we want to execute query on localhost replica. But for when executing query on multiple replicas we rely on the fact that a connection exists so replicas can talk to coordinator. It is now improved and localhost replica can talk to coordinator directly in the same process. [#36281](https://github.com/ClickHouse/ClickHouse/pull/36281) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
|
||||||
|
#### Performance Improvement
|
||||||
|
|
||||||
|
* Improve performance of `avg`, `sum` aggregate functions if used without GROUP BY expression. [#37257](https://github.com/ClickHouse/ClickHouse/pull/37257) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Improve performance of unary arithmetic functions (`bitCount`, `bitNot`, `abs`, `intExp2`, `intExp10`, `negate`, `roundAge`, `roundDuration`, `roundToExp2`, `sign`) using dynamic dispatch. [#37289](https://github.com/ClickHouse/ClickHouse/pull/37289) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Improve performance of ORDER BY, MergeJoin, insertion into MergeTree using JIT compilation of sort columns comparator. [#34469](https://github.com/ClickHouse/ClickHouse/pull/34469) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Change structure of `system.asynchronous_metric_log`. It will take about 10 times less space. This closes [#36357](https://github.com/ClickHouse/ClickHouse/issues/36357). The field `event_time_microseconds` was removed, because it is useless. [#36360](https://github.com/ClickHouse/ClickHouse/pull/36360) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Load marks for only necessary columns when reading wide parts. [#36879](https://github.com/ClickHouse/ClickHouse/pull/36879) ([Anton Kozlov](https://github.com/tonickkozlov)).
|
||||||
|
* Improves performance of file descriptor cache by narrowing mutex scopes. [#36682](https://github.com/ClickHouse/ClickHouse/pull/36682) ([Anton Kozlov](https://github.com/tonickkozlov)).
|
||||||
|
* Improve performance of reading from storage `File` and table functions `file` in case when path has globs and matched directory contains large number of files. [#36647](https://github.com/ClickHouse/ClickHouse/pull/36647) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Apply parallel parsing for input format `HiveText`, which can speed up HiveText parsing by 2x when reading local file. [#36650](https://github.com/ClickHouse/ClickHouse/pull/36650) ([李扬](https://github.com/taiyang-li)).
|
||||||
|
* The default `HashJoin` is not thread safe for inserting right table's rows and run it in a single thread. When the right table is large, the join process is too slow with low cpu utilization. [#36415](https://github.com/ClickHouse/ClickHouse/pull/36415) ([lgbo](https://github.com/lgbo-ustc)).
|
||||||
|
* Allow to rewrite `select countDistinct(a) from t` to `select count(1) from (select a from t groupBy a)`. [#35993](https://github.com/ClickHouse/ClickHouse/pull/35993) ([zhanglistar](https://github.com/zhanglistar)).
|
||||||
|
* Transform OR LIKE chain to multiMatchAny. Will enable once we have more confidence it works. [#34932](https://github.com/ClickHouse/ClickHouse/pull/34932) ([Daniel Kutenin](https://github.com/danlark1)).
|
||||||
|
* Improve performance of some functions with inlining. [#34544](https://github.com/ClickHouse/ClickHouse/pull/34544) ([Daniel Kutenin](https://github.com/danlark1)).
|
||||||
|
* Add a branch to avoid unnecessary memcpy in readBig. It improves performance somewhat. [#36095](https://github.com/ClickHouse/ClickHouse/pull/36095) ([jasperzhu](https://github.com/jinjunzh)).
|
||||||
|
* Implement partial GROUP BY key for optimize_aggregation_in_order. [#35111](https://github.com/ClickHouse/ClickHouse/pull/35111) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
|
||||||
|
* Show names of erroneous files in case of parsing errors while executing table functions `file`, `s3` and `url`. [#36314](https://github.com/ClickHouse/ClickHouse/pull/36314) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Allowed to increase the number of threads for executing background operations (merges, mutations, moves and fetches) at runtime if they are specified at top level config. [#36425](https://github.com/ClickHouse/ClickHouse/pull/36425) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Now date time conversion functions that generates time before 1970-01-01 00:00:00 with partial hours/minutes timezones will be saturated to zero instead of overflow. This is the continuation of https://github.com/ClickHouse/ClickHouse/pull/29953 which addresses https://github.com/ClickHouse/ClickHouse/pull/29953#discussion_r800550280 . Mark as improvement because it's implementation defined behavior (and very rare case) and we are allowed to break it. [#36656](https://github.com/ClickHouse/ClickHouse/pull/36656) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Add a warning if someone running clickhouse-server with log level "test". The log level "test" was added recently and cannot be used in production due to inevitable, unavoidable, fatal and life-threatening performance degradation. [#36824](https://github.com/ClickHouse/ClickHouse/pull/36824) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Parse collations in CREATE TABLE, throw exception or ignore. closes [#35892](https://github.com/ClickHouse/ClickHouse/issues/35892). [#36271](https://github.com/ClickHouse/ClickHouse/pull/36271) ([yuuch](https://github.com/yuuch)).
|
||||||
|
* Option `compatibility_ignore_auto_increment_in_create_table` allows ignoring `AUTO_INCREMENT` keyword in a column declaration to simplify migration from MySQL. [#37178](https://github.com/ClickHouse/ClickHouse/pull/37178) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* Add aliases `JSONLines` and `NDJSON` for `JSONEachRow`. Closes [#36303](https://github.com/ClickHouse/ClickHouse/issues/36303). [#36327](https://github.com/ClickHouse/ClickHouse/pull/36327) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Limit the max partitions could be queried for each hive table. Avoid resource overruns. [#37281](https://github.com/ClickHouse/ClickHouse/pull/37281) ([lgbo](https://github.com/lgbo-ustc)).
|
||||||
|
* Added implicit cast for `h3kRing` function second argument to improve usability. Closes [#35432](https://github.com/ClickHouse/ClickHouse/issues/35432). [#37189](https://github.com/ClickHouse/ClickHouse/pull/37189) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Fix progress indication for `INSERT SELECT` in `clickhouse-local` for any query and for file progress in client, more correct file progress. [#37075](https://github.com/ClickHouse/ClickHouse/pull/37075) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix bug which can lead to forgotten outdated parts in MergeTree table engines family in case of filesystem failures during parts removal. Before fix they will be removed only after first server restart. [#37014](https://github.com/ClickHouse/ClickHouse/pull/37014) ([alesapin](https://github.com/alesapin)).
|
||||||
|
* Implemented a new mode of handling row policies which can be enabled in the main configuration which enables users without permissive row policies to read rows. [#36997](https://github.com/ClickHouse/ClickHouse/pull/36997) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||||
|
* Play UI: Nullable numbers will be aligned to the right in table cells. This closes [#36982](https://github.com/ClickHouse/ClickHouse/issues/36982). [#36988](https://github.com/ClickHouse/ClickHouse/pull/36988) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Play UI: If there is one row in result and more than a few columns, display the result vertically. Continuation of [#36811](https://github.com/ClickHouse/ClickHouse/issues/36811). [#36842](https://github.com/ClickHouse/ClickHouse/pull/36842) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Cleanup CSS in Play UI. The pixels are more evenly placed. Better usability for long content in table cells. [#36569](https://github.com/ClickHouse/ClickHouse/pull/36569) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Finalize write buffers in case of exception to avoid doing it in destructors. Hope it fixes: [#36907](https://github.com/ClickHouse/ClickHouse/issues/36907). [#36979](https://github.com/ClickHouse/ClickHouse/pull/36979) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* After [#36425](https://github.com/ClickHouse/ClickHouse/issues/36425) settings like `background_fetches_pool_size` became obsolete and can appear in top level config, but clickhouse throws and exception like `Error updating configuration from '/etc/clickhouse-server/config.xml' config.: Code: 137. DB::Exception: A setting 'background_fetches_pool_size' appeared at top level in config /etc/clickhouse-server/config.xml.` This is fixed. [#36917](https://github.com/ClickHouse/ClickHouse/pull/36917) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Add extra diagnostic info (if applicable) when sending exception to other server. [#36872](https://github.com/ClickHouse/ClickHouse/pull/36872) ([tavplubix](https://github.com/tavplubix)).
|
||||||
|
* Allow to execute hash functions with arguments of type `Array(Tuple(..))`. [#36812](https://github.com/ClickHouse/ClickHouse/pull/36812) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Added `user_defined_path` config setting. [#36753](https://github.com/ClickHouse/ClickHouse/pull/36753) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Allow cluster macro in `s3Cluster` table function. [#36726](https://github.com/ClickHouse/ClickHouse/pull/36726) ([Vadim Volodin](https://github.com/PolyProgrammist)).
|
||||||
|
* Properly cancel INSERT queries in `clickhouse-client`/`clickhouse-local`. [#36710](https://github.com/ClickHouse/ClickHouse/pull/36710) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Allow to cancel a query while still keeping a decent query id in `MySQLHandler`. [#36699](https://github.com/ClickHouse/ClickHouse/pull/36699) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Add `is_all_data_sent` column into `system.processes`, and improve internal testing hardening check based on it. [#36649](https://github.com/ClickHouse/ClickHouse/pull/36649) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* The metrics about time spent reading from s3 now calculated correctly. Close [#35483](https://github.com/ClickHouse/ClickHouse/issues/35483). [#36572](https://github.com/ClickHouse/ClickHouse/pull/36572) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Allow file descriptors in table function file if it is run in clickhouse-local. [#36562](https://github.com/ClickHouse/ClickHouse/pull/36562) ([wuxiaobai24](https://github.com/wuxiaobai24)).
|
||||||
|
* Allow names of tuple elements that start from digits. [#36544](https://github.com/ClickHouse/ClickHouse/pull/36544) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Now clickhouse-benchmark can read authentication info from environment variables. [#36497](https://github.com/ClickHouse/ClickHouse/pull/36497) ([Anton Kozlov](https://github.com/tonickkozlov)).
|
||||||
|
* `clickhouse-keeper` improvement: add support for force recovery which allows you to reconfigure cluster without quorum. [#36258](https://github.com/ClickHouse/ClickHouse/pull/36258) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Improve schema inference for JSON objects. [#36207](https://github.com/ClickHouse/ClickHouse/pull/36207) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Refactor code around schema inference with globs. Try next file from glob only if it makes sense (previously we tried next file in case of any error). Also it fixes [#36317](https://github.com/ClickHouse/ClickHouse/issues/36317). [#36205](https://github.com/ClickHouse/ClickHouse/pull/36205) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Add a separate `CLUSTER` grant (and `access_control_improvements.on_cluster_queries_require_cluster_grant` configuration directive, for backward compatibility, default to `false`). [#35767](https://github.com/ClickHouse/ClickHouse/pull/35767) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* If the required amount of memory is available before the selected query stopped, all waiting queries continue execution. Now we don't stop any query if memory is freed before the moment when the selected query knows about the cancellation. [#35637](https://github.com/ClickHouse/ClickHouse/pull/35637) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* Nullables detection in protobuf. In proto3, default values are not sent on the wire. This makes it non-trivial to distinguish between null and default values for Nullable columns. A standard way to deal with this problem is to use Google wrappers to nest the target value within an inner message (see https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/wrappers.proto). In this case, a missing field is interpreted as null value, a field with missing value if interpreted as default value, and a field with regular value is interpreted as regular value. However, ClickHouse interprets Google wrappers as nested columns. We propose to introduce special behaviour to detect Google wrappers and interpret them like in the description above. For example, to serialize values for a Nullable column `test`, we would use `google.protobuf.StringValue test` in our .proto schema. Note that these types are so called "well-known types" in Protobuf, implemented in the library itself. [#35149](https://github.com/ClickHouse/ClickHouse/pull/35149) ([Jakub Kuklis](https://github.com/jkuklis)).
|
||||||
|
* Added support for specifying `content_type` in predefined and static HTTP handler config. [#34916](https://github.com/ClickHouse/ClickHouse/pull/34916) ([Roman Nikonov](https://github.com/nic11)).
|
||||||
|
* Warn properly if use clickhouse-client --file without preceeding --external. Close [#34747](https://github.com/ClickHouse/ClickHouse/issues/34747). [#34765](https://github.com/ClickHouse/ClickHouse/pull/34765) ([李扬](https://github.com/taiyang-li)).
|
||||||
|
* Improve MySQL database engine to compatible with binary(0) dataType. [#37232](https://github.com/ClickHouse/ClickHouse/pull/37232) ([zzsmdfj](https://github.com/zzsmdfj)).
|
||||||
|
* Improve JSON report of clickhouse-benchmark. [#36473](https://github.com/ClickHouse/ClickHouse/pull/36473) ([Tian Xinhui](https://github.com/xinhuitian)).
|
||||||
|
* Server might refuse to start if it cannot resolve hostname of external ClickHouse dictionary. It's fixed. Fixes [#36451](https://github.com/ClickHouse/ClickHouse/issues/36451). [#36463](https://github.com/ClickHouse/ClickHouse/pull/36463) ([tavplubix](https://github.com/tavplubix)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
|
||||||
|
* Now `clickhouse-keeper` for the `x86_64` architecture is statically linked with [musl](https://musl.libc.org/) and doesn't depend on any system libraries. [#31833](https://github.com/ClickHouse/ClickHouse/pull/31833) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* ClickHouse builds for `PowerPC64LE` architecture are now available in universal installation script `curl https://clickhouse.com/ | sh` and by direct link `https://builds.clickhouse.com/master/powerpc64le/clickhouse`. [#37095](https://github.com/ClickHouse/ClickHouse/pull/37095) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Limit PowerPC code generation to Power8 for better compatibility. This closes [#36025](https://github.com/ClickHouse/ClickHouse/issues/36025). [#36529](https://github.com/ClickHouse/ClickHouse/pull/36529) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Simplify performance test. This will give a chance for us to use it. [#36769](https://github.com/ClickHouse/ClickHouse/pull/36769) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Fail performance comparison on errors in the report. [#34797](https://github.com/ClickHouse/ClickHouse/pull/34797) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Add ZSTD support for Arrow. This fixes [#35283](https://github.com/ClickHouse/ClickHouse/issues/35283). [#35486](https://github.com/ClickHouse/ClickHouse/pull/35486) ([Sean Lafferty](https://github.com/seanlaff)).
|
||||||
|
|
||||||
|
#### Bug Fix
|
||||||
|
|
||||||
|
* Extracts Version ID if present from the URI and adds a request to the AWS HTTP URI. Closes [#31221](https://github.com/ClickHouse/ClickHouse/issues/31221). - [x] Extract `Version ID` from URI if present and reassemble without it. - [x] Configure `AWS HTTP URI` object with request. - [x] Unit Tests: [`gtest_s3_uri`](https://github.com/ClickHouse/ClickHouse/blob/2340a6c6849ebc05a8efbf97ba8de3ff9dc0eff4/src/IO/tests/gtest_s3_uri.cpp) - [x] Drop instrumentation commit. [#34571](https://github.com/ClickHouse/ClickHouse/pull/34571) ([Saad Ur Rahman](https://github.com/surahman)).
|
||||||
|
* Fix system.opentelemetry_span_log attribute.values alias to values instead of keys. [#37275](https://github.com/ClickHouse/ClickHouse/pull/37275) ([Aleksandr Razumov](https://github.com/ernado)).
|
||||||
|
* Fix Nullable(String) to Nullable(Bool/IPv4/IPv6) conversion Closes [#37221](https://github.com/ClickHouse/ClickHouse/issues/37221). [#37270](https://github.com/ClickHouse/ClickHouse/pull/37270) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Experimental feature: Fix execution of mutations in tables, in which there exist columns of type `Object`. Using subcolumns of type `Object` in `WHERE` expression of `UPDATE` or `DELETE` queries is now allowed yet, as well as manipulating (`DROP`, `MODIFY`) of separate subcolumns. Fixes [#37205](https://github.com/ClickHouse/ClickHouse/issues/37205). [#37266](https://github.com/ClickHouse/ClickHouse/pull/37266) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Kafka does not need `group.id` on producer stage. In console log you can find Warning that describe this issue: ``` 2022.05.15 17:59:13.270227 [ 137 ] {} <Warning> StorageKafka (topic-name): [rdk:CONFWARN] [thrd:app]: Configuration property group.id is a consumer property and will be ignored by this producer instance ```. [#37228](https://github.com/ClickHouse/ClickHouse/pull/37228) ([Mark Andreev](https://github.com/mrk-andreev)).
|
||||||
|
* Experimental feature (WindowView): Update `max_fired_watermark ` after blocks actually fired, in case delete data that hasn't been fired yet. [#37225](https://github.com/ClickHouse/ClickHouse/pull/37225) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Fix "Cannot create column of type Set" for distributed queries with LIMIT BY. [#37193](https://github.com/ClickHouse/ClickHouse/pull/37193) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Experimental feature: Now WindowView `WATCH EVENTS` query will not be terminated due to the nonempty Chunk created in `WindowViewSource.h:58`. [#37182](https://github.com/ClickHouse/ClickHouse/pull/37182) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Enable `enable_global_with_statement` for subqueries, close [#37141](https://github.com/ClickHouse/ClickHouse/issues/37141). [#37166](https://github.com/ClickHouse/ClickHouse/pull/37166) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Fix implicit cast for optimize_skip_unused_shards_rewrite_in. [#37153](https://github.com/ClickHouse/ClickHouse/pull/37153) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* The ILIKE function on FixedString columns could have returned wrong results (i.e. match less than it should). [#37117](https://github.com/ClickHouse/ClickHouse/pull/37117) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Fix `GROUP BY` `AggregateFunction` (i.e. you `GROUP BY` by the column that has `AggregateFunction` type). [#37093](https://github.com/ClickHouse/ClickHouse/pull/37093) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Experimental feature: Fix optimize_aggregation_in_order with prefix GROUP BY and *Array aggregate functions. [#37050](https://github.com/ClickHouse/ClickHouse/pull/37050) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fixed performance degradation of some INSERT SELECT queries with implicit aggregation. Fixes [#36792](https://github.com/ClickHouse/ClickHouse/issues/36792). [#37047](https://github.com/ClickHouse/ClickHouse/pull/37047) ([tavplubix](https://github.com/tavplubix)).
|
||||||
|
* Experimental feature: Fix in-order `GROUP BY` (`optimize_aggregation_in_order=1`) with `*Array` (`groupArrayArray`/...) aggregate functions. [#37046](https://github.com/ClickHouse/ClickHouse/pull/37046) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fix LowCardinality->ArrowDictionary invalid output when type of indexes is not UInt8. Closes [#36832](https://github.com/ClickHouse/ClickHouse/issues/36832). [#37043](https://github.com/ClickHouse/ClickHouse/pull/37043) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fixed problem with infs in `quantileTDigest`. Fixes [#32107](https://github.com/ClickHouse/ClickHouse/issues/32107). [#37021](https://github.com/ClickHouse/ClickHouse/pull/37021) ([Vladimir Chebotarev](https://github.com/excitoon)).
|
||||||
|
* Fix sending external tables data in HedgedConnections with max_parallel_replicas != 1. [#36981](https://github.com/ClickHouse/ClickHouse/pull/36981) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fixed logical error on `TRUNCATE` query in `Replicated` database. Fixes [#33747](https://github.com/ClickHouse/ClickHouse/issues/33747). [#36976](https://github.com/ClickHouse/ClickHouse/pull/36976) ([tavplubix](https://github.com/tavplubix)).
|
||||||
|
* Experimental feature: Fix stuck when dropping source table in WindowView. Closes [#35678](https://github.com/ClickHouse/ClickHouse/issues/35678). [#36967](https://github.com/ClickHouse/ClickHouse/pull/36967) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Experimental feature (rocksdb cache): Fix issue: [#36671](https://github.com/ClickHouse/ClickHouse/issues/36671). [#36929](https://github.com/ClickHouse/ClickHouse/pull/36929) ([李扬](https://github.com/taiyang-li)).
|
||||||
|
* Experimental feature: Fix bugs when using multiple columns in WindowView by adding converting actions to make it possible to call`writeIntoWindowView` with a slightly different schema. [#36928](https://github.com/ClickHouse/ClickHouse/pull/36928) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Fix bug in clickhouse-keeper which can lead to corrupted compressed log files in case of small load and restarts. [#36910](https://github.com/ClickHouse/ClickHouse/pull/36910) ([alesapin](https://github.com/alesapin)).
|
||||||
|
* Fix incorrect query result when doing constant aggregation. This fixes [#36728](https://github.com/ClickHouse/ClickHouse/issues/36728) . [#36888](https://github.com/ClickHouse/ClickHouse/pull/36888) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Experimental feature: Fix `current_size` count in cache. [#36887](https://github.com/ClickHouse/ClickHouse/pull/36887) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Experimental feature: Fix fire in window view with hop window [#34044](https://github.com/ClickHouse/ClickHouse/issues/34044). [#36861](https://github.com/ClickHouse/ClickHouse/pull/36861) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Experimental feature: Fix incorrect cast in cached buffer from remote fs. [#36809](https://github.com/ClickHouse/ClickHouse/pull/36809) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix creation of tables with `flatten_nested = 0`. Previously unflattened `Nested` columns could be flattened after server restart. [#36803](https://github.com/ClickHouse/ClickHouse/pull/36803) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix some issues with async reads from remote filesystem which happened when reading low cardinality. [#36763](https://github.com/ClickHouse/ClickHouse/pull/36763) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Experimental feature: Fix insertion to columns of type `Object` from multiple files, e.g. via table function `file` with globs. [#36762](https://github.com/ClickHouse/ClickHouse/pull/36762) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix timeouts in Hedged requests. Connection hang right after sending remote query could lead to eternal waiting. [#36749](https://github.com/ClickHouse/ClickHouse/pull/36749) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Experimental feature: Fix a bug of `groupBitmapAndState`/`groupBitmapOrState`/`groupBitmapXorState` on distributed table. [#36739](https://github.com/ClickHouse/ClickHouse/pull/36739) ([Zhang Yifan](https://github.com/zhangyifan27)).
|
||||||
|
* Experimental feature: During the [test](https://s3.amazonaws.com/clickhouse-test-reports/36376/1cb1c7275cb53769ab826772db9b71361bb3e413/stress_test__thread__actions_/clickhouse-server.clean.log) in [PR](https://github.com/ClickHouse/ClickHouse/pull/36376), I found that the one cache class was initialized twice, it throws a exception. Although the cause of this problem is not clear, there should be code logic of repeatedly loading disk in ClickHouse, so we need to make special judgment for this situation. [#36737](https://github.com/ClickHouse/ClickHouse/pull/36737) ([Han Shukai](https://github.com/KinderRiven)).
|
||||||
|
* Fix vertical merges in wide parts. Previously an exception `There is no column` can be thrown during merge. [#36707](https://github.com/ClickHouse/ClickHouse/pull/36707) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix server reload on port change (do not wait for current connections from query context). [#36700](https://github.com/ClickHouse/ClickHouse/pull/36700) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Experimental feature: In the previous [PR](https://github.com/ClickHouse/ClickHouse/pull/36376), I found that testing (stateless tests, flaky check (address, actions)) is timeout. Moreover, testing locally can also trigger unstable system deadlocks. This problem still exists when using the latest source code of master. [#36697](https://github.com/ClickHouse/ClickHouse/pull/36697) ([Han Shukai](https://github.com/KinderRiven)).
|
||||||
|
* Experimental feature: Fix server restart if cache configuration changed. [#36685](https://github.com/ClickHouse/ClickHouse/pull/36685) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix possible heap-use-after-free in schema inference. Closes [#36661](https://github.com/ClickHouse/ClickHouse/issues/36661). [#36679](https://github.com/ClickHouse/ClickHouse/pull/36679) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fixed parsing of query settings in `CREATE` query when engine is not specified. Fixes https://github.com/ClickHouse/ClickHouse/pull/34187#issuecomment-1103812419. [#36642](https://github.com/ClickHouse/ClickHouse/pull/36642) ([tavplubix](https://github.com/tavplubix)).
|
||||||
|
* Experimental feature: Fix merges of wide parts with type `Object`. [#36637](https://github.com/ClickHouse/ClickHouse/pull/36637) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix format crash when default expression follow EPHEMERAL not literal. Closes [#36618](https://github.com/ClickHouse/ClickHouse/issues/36618). [#36633](https://github.com/ClickHouse/ClickHouse/pull/36633) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Fix `Missing column` exception which could happen while using `INTERPOLATE` with `ENGINE = MergeTree` table. [#36549](https://github.com/ClickHouse/ClickHouse/pull/36549) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
|
* Fix potential error with literals in `WHERE` for join queries. Close [#36279](https://github.com/ClickHouse/ClickHouse/issues/36279). [#36542](https://github.com/ClickHouse/ClickHouse/pull/36542) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Fix offset update ReadBufferFromEncryptedFile, which could cause undefined behaviour. [#36493](https://github.com/ClickHouse/ClickHouse/pull/36493) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix hostname sanity checks for Keeper cluster configuration. Add `keeper_server.host_checks_enabled` config to enable/disable those checks. [#36492](https://github.com/ClickHouse/ClickHouse/pull/36492) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Fix usage of executable user defined functions in GROUP BY. Before executable user defined functions cannot be used as expressions in GROUP BY. Closes [#36448](https://github.com/ClickHouse/ClickHouse/issues/36448). [#36486](https://github.com/ClickHouse/ClickHouse/pull/36486) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Fix possible exception with unknown packet from server in client. [#36481](https://github.com/ClickHouse/ClickHouse/pull/36481) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Experimental feature (please never use `system.session_log`, it is going to be removed): Add missing enum values in system.session_log table. Closes [#36474](https://github.com/ClickHouse/ClickHouse/issues/36474). [#36480](https://github.com/ClickHouse/ClickHouse/pull/36480) ([Memo](https://github.com/Joeywzr)).
|
||||||
|
* Fix bug in s3Cluster schema inference that let to the fact that not all data was read in the select from s3Cluster. The bug appeared in https://github.com/ClickHouse/ClickHouse/pull/35544. [#36434](https://github.com/ClickHouse/ClickHouse/pull/36434) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fix nullptr dereference in JOIN and COLUMNS matcher. This fixes [#36416](https://github.com/ClickHouse/ClickHouse/issues/36416). This is for https://github.com/ClickHouse/ClickHouse/pull/36417. [#36430](https://github.com/ClickHouse/ClickHouse/pull/36430) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Fix dictionary reload for `ClickHouseDictionarySource` if it contains scalar subqueries. [#36390](https://github.com/ClickHouse/ClickHouse/pull/36390) ([lthaooo](https://github.com/lthaooo)).
|
||||||
|
* Fix assertion in JOIN, close [#36199](https://github.com/ClickHouse/ClickHouse/issues/36199). [#36201](https://github.com/ClickHouse/ClickHouse/pull/36201) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Queries with aliases inside special operators returned parsing error (was broken in 22.1). Example: `SELECT substring('test' AS t, 1, 1)`. [#36167](https://github.com/ClickHouse/ClickHouse/pull/36167) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Experimental feature: Fix insertion of complex JSONs with nested arrays to columns of type `Object`. [#36077](https://github.com/ClickHouse/ClickHouse/pull/36077) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix ALTER DROP COLUMN of nested column with compact parts (i.e. `ALTER TABLE x DROP COLUMN n`, when there is column `n.d`). [#35797](https://github.com/ClickHouse/ClickHouse/pull/35797) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fix substring function range error length when `offset` and `length` is negative constant and `s` is not constant. [#33861](https://github.com/ClickHouse/ClickHouse/pull/33861) ([RogerYK](https://github.com/RogerYK)).
|
||||||
|
|
||||||
|
|
||||||
|
### <a id="224"></a> ClickHouse release 22.4, 2022-04-19
|
||||||
|
|
||||||
#### Backward Incompatible Change
|
#### Backward Incompatible Change
|
||||||
|
|
||||||
|
@ -1,32 +1,6 @@
|
|||||||
cmake_minimum_required(VERSION 3.14)
|
cmake_minimum_required(VERSION 3.14)
|
||||||
|
|
||||||
foreach(policy
|
project(ClickHouse LANGUAGES C CXX ASM)
|
||||||
CMP0023
|
|
||||||
CMP0048 # CMake 3.0
|
|
||||||
CMP0074 # CMake 3.12
|
|
||||||
CMP0077
|
|
||||||
CMP0079
|
|
||||||
)
|
|
||||||
if(POLICY ${policy})
|
|
||||||
cmake_policy(SET ${policy} NEW)
|
|
||||||
endif()
|
|
||||||
endforeach()
|
|
||||||
|
|
||||||
# set default policy
|
|
||||||
foreach(default_policy_var_name
|
|
||||||
# make option() honor normal variables for BUILD_SHARED_LIBS:
|
|
||||||
# - re2
|
|
||||||
# - snappy
|
|
||||||
CMAKE_POLICY_DEFAULT_CMP0077
|
|
||||||
# Google Test from sources uses too old cmake, 2.6.x, and CMP0022 should
|
|
||||||
# set, to avoid using deprecated LINK_INTERFACE_LIBRARIES(_<CONFIG>)? over
|
|
||||||
# INTERFACE_LINK_LIBRARIES.
|
|
||||||
CMAKE_POLICY_DEFAULT_CMP0022
|
|
||||||
)
|
|
||||||
set(${default_policy_var_name} NEW)
|
|
||||||
endforeach()
|
|
||||||
|
|
||||||
project(ClickHouse)
|
|
||||||
|
|
||||||
# If turned off: e.g. when ENABLE_FOO is ON, but FOO tool was not found, the CMake will continue.
|
# If turned off: e.g. when ENABLE_FOO is ON, but FOO tool was not found, the CMake will continue.
|
||||||
option(FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION
|
option(FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION
|
||||||
@ -39,11 +13,10 @@ else()
|
|||||||
set(RECONFIGURE_MESSAGE_LEVEL WARNING)
|
set(RECONFIGURE_MESSAGE_LEVEL WARNING)
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
enable_language(C CXX ASM)
|
|
||||||
|
|
||||||
include (cmake/arch.cmake)
|
include (cmake/arch.cmake)
|
||||||
include (cmake/target.cmake)
|
include (cmake/target.cmake)
|
||||||
include (cmake/tools.cmake)
|
include (cmake/tools.cmake)
|
||||||
|
include (cmake/ccache.cmake)
|
||||||
include (cmake/clang_tidy.cmake)
|
include (cmake/clang_tidy.cmake)
|
||||||
include (cmake/git_status.cmake)
|
include (cmake/git_status.cmake)
|
||||||
|
|
||||||
@ -52,7 +25,6 @@ include (cmake/git_status.cmake)
|
|||||||
macro (export)
|
macro (export)
|
||||||
endmacro ()
|
endmacro ()
|
||||||
|
|
||||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/")
|
|
||||||
set(CMAKE_EXPORT_COMPILE_COMMANDS 1) # Write compile_commands.json
|
set(CMAKE_EXPORT_COMPILE_COMMANDS 1) # Write compile_commands.json
|
||||||
set(CMAKE_LINK_DEPENDS_NO_SHARED 1) # Do not relink all depended targets on .so
|
set(CMAKE_LINK_DEPENDS_NO_SHARED 1) # Do not relink all depended targets on .so
|
||||||
set(CMAKE_CONFIGURATION_TYPES "RelWithDebInfo;Debug;Release;MinSizeRel" CACHE STRING "" FORCE)
|
set(CMAKE_CONFIGURATION_TYPES "RelWithDebInfo;Debug;Release;MinSizeRel" CACHE STRING "" FORCE)
|
||||||
@ -67,8 +39,6 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/sysroot/README.md")
|
|||||||
message (FATAL_ERROR "Submodules are not initialized. Run\n\tgit submodule update --init --recursive")
|
message (FATAL_ERROR "Submodules are not initialized. Run\n\tgit submodule update --init --recursive")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
include (cmake/ccache.cmake)
|
|
||||||
|
|
||||||
# Take care to add prlimit in command line before ccache, or else ccache thinks that
|
# Take care to add prlimit in command line before ccache, or else ccache thinks that
|
||||||
# prlimit is compiler, and clang++ is its input file, and refuses to work with
|
# prlimit is compiler, and clang++ is its input file, and refuses to work with
|
||||||
# multiple inputs, e.g in ccache log:
|
# multiple inputs, e.g in ccache log:
|
||||||
@ -161,20 +131,22 @@ add_library(global-libs INTERFACE)
|
|||||||
include (cmake/fuzzer.cmake)
|
include (cmake/fuzzer.cmake)
|
||||||
include (cmake/sanitize.cmake)
|
include (cmake/sanitize.cmake)
|
||||||
|
|
||||||
if (CMAKE_GENERATOR STREQUAL "Ninja" AND NOT DISABLE_COLORED_BUILD)
|
option(ENABLE_COLORED_BUILD "Enable colors in compiler output" ON)
|
||||||
|
|
||||||
|
set (CMAKE_COLOR_MAKEFILE ${ENABLE_COLORED_BUILD}) # works only for the makefile generator
|
||||||
|
|
||||||
|
if (ENABLE_COLORED_BUILD AND CMAKE_GENERATOR STREQUAL "Ninja")
|
||||||
# Turn on colored output. https://github.com/ninja-build/ninja/wiki/FAQ
|
# Turn on colored output. https://github.com/ninja-build/ninja/wiki/FAQ
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always")
|
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always")
|
||||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-color=always")
|
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-color=always")
|
||||||
|
# ... such manually setting of flags can be removed once CMake supports a variable to
|
||||||
|
# activate colors in *all* build systems: https://gitlab.kitware.com/cmake/cmake/-/issues/15502
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
include (cmake/check_flags.cmake)
|
include (cmake/check_flags.cmake)
|
||||||
include (cmake/add_warning.cmake)
|
include (cmake/add_warning.cmake)
|
||||||
|
|
||||||
set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake
|
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
if (COMPILER_CLANG)
|
||||||
# clang: warning: argument unused during compilation: '-specs=/usr/share/dpkg/no-pie-compile.specs' [-Wunused-command-line-argument]
|
|
||||||
set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wno-unused-command-line-argument")
|
|
||||||
# generate ranges for fast "addr2line" search
|
# generate ranges for fast "addr2line" search
|
||||||
if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
|
if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
|
||||||
set(COMPILER_FLAGS "${COMPILER_FLAGS} -gdwarf-aranges")
|
set(COMPILER_FLAGS "${COMPILER_FLAGS} -gdwarf-aranges")
|
||||||
@ -371,12 +343,11 @@ set (COMPILER_FLAGS "${COMPILER_FLAGS}")
|
|||||||
# Our built-in unwinder only supports DWARF version up to 4.
|
# Our built-in unwinder only supports DWARF version up to 4.
|
||||||
set (DEBUG_INFO_FLAGS "-g -gdwarf-4")
|
set (DEBUG_INFO_FLAGS "-g -gdwarf-4")
|
||||||
|
|
||||||
set (CMAKE_BUILD_COLOR_MAKEFILE ON)
|
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS}")
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${PLATFORM_EXTRA_CXX_FLAG} ${COMMON_WARNING_FLAGS} ${CXX_WARNING_FLAGS}")
|
|
||||||
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
||||||
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_CXX_FLAGS_ADD}")
|
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_CXX_FLAGS_ADD}")
|
||||||
|
|
||||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${COMMON_WARNING_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||||
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||||
set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_C_FLAGS_ADD}")
|
set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_C_FLAGS_ADD}")
|
||||||
|
|
||||||
@ -423,13 +394,6 @@ endif ()
|
|||||||
# Turns on all external libs like s3, kafka, ODBC, ...
|
# Turns on all external libs like s3, kafka, ODBC, ...
|
||||||
option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)
|
option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)
|
||||||
|
|
||||||
if (NOT (OS_LINUX OR OS_DARWIN))
|
|
||||||
# Using system libs can cause a lot of warnings in includes (on macro expansion).
|
|
||||||
option(WERROR "Enable -Werror compiler option" OFF)
|
|
||||||
else ()
|
|
||||||
option(WERROR "Enable -Werror compiler option" ON)
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# Increase stack size on Musl. We need big stack for our recursive-descend parser.
|
# Increase stack size on Musl. We need big stack for our recursive-descend parser.
|
||||||
if (USE_MUSL)
|
if (USE_MUSL)
|
||||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-z,stack-size=2097152")
|
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-z,stack-size=2097152")
|
||||||
@ -447,6 +411,13 @@ elseif (OS_FREEBSD)
|
|||||||
endif ()
|
endif ()
|
||||||
link_libraries(global-group)
|
link_libraries(global-group)
|
||||||
|
|
||||||
|
if (NOT (OS_LINUX OR OS_DARWIN))
|
||||||
|
# Using system libs can cause a lot of warnings in includes (on macro expansion).
|
||||||
|
option(WERROR "Enable -Werror compiler option" OFF)
|
||||||
|
else ()
|
||||||
|
option(WERROR "Enable -Werror compiler option" ON)
|
||||||
|
endif ()
|
||||||
|
|
||||||
if (WERROR)
|
if (WERROR)
|
||||||
# Don't pollute CMAKE_CXX_FLAGS with -Werror as it will break some CMake checks.
|
# Don't pollute CMAKE_CXX_FLAGS with -Werror as it will break some CMake checks.
|
||||||
# Instead, adopt modern cmake usage requirement.
|
# Instead, adopt modern cmake usage requirement.
|
||||||
@ -455,7 +426,7 @@ endif ()
|
|||||||
|
|
||||||
# Make this extra-checks for correct library dependencies.
|
# Make this extra-checks for correct library dependencies.
|
||||||
if (OS_LINUX AND NOT SANITIZE)
|
if (OS_LINUX AND NOT SANITIZE)
|
||||||
target_link_options(global-group INTERFACE "-Wl,--no-undefined")
|
target_link_options(global-group INTERFACE "LINKER:--no-undefined")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
######################################
|
######################################
|
||||||
@ -466,7 +437,7 @@ set (CMAKE_POSTFIX_VARIABLE "CMAKE_${CMAKE_BUILD_TYPE_UC}_POSTFIX")
|
|||||||
|
|
||||||
if (USE_STATIC_LIBRARIES)
|
if (USE_STATIC_LIBRARIES)
|
||||||
set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
|
set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
|
||||||
if (OS_LINUX AND NOT ARCH_ARM)
|
if (OS_LINUX AND NOT ARCH_AARCH64)
|
||||||
# Slightly more efficient code can be generated
|
# Slightly more efficient code can be generated
|
||||||
# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
|
# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
|
||||||
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
|
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
|
||||||
@ -500,8 +471,7 @@ endif ()
|
|||||||
message (STATUS
|
message (STATUS
|
||||||
"Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ;
|
"Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ;
|
||||||
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
|
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
|
||||||
SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}
|
SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}")
|
||||||
CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")
|
|
||||||
|
|
||||||
include (GNUInstallDirs)
|
include (GNUInstallDirs)
|
||||||
|
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
#include <string>
|
#include <string>
|
||||||
#include <string.h>
|
#include <cstring>
|
||||||
|
|
||||||
#include <Poco/UTF8Encoding.h>
|
#include <Poco/UTF8Encoding.h>
|
||||||
#include <Poco/NumberParser.h>
|
#include <Poco/NumberParser.h>
|
||||||
@ -12,7 +12,7 @@
|
|||||||
#define JSON_MAX_DEPTH 100
|
#define JSON_MAX_DEPTH 100
|
||||||
|
|
||||||
|
|
||||||
POCO_IMPLEMENT_EXCEPTION(JSONException, Poco::Exception, "JSONException")
|
POCO_IMPLEMENT_EXCEPTION(JSONException, Poco::Exception, "JSONException") // NOLINT(cert-err60-cpp, modernize-use-noexcept, hicpp-use-noexcept)
|
||||||
|
|
||||||
|
|
||||||
/// Прочитать беззнаковое целое в простом формате из не-0-terminated строки.
|
/// Прочитать беззнаковое целое в простом формате из не-0-terminated строки.
|
||||||
|
@ -5,7 +5,7 @@
|
|||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
|
|
||||||
#include <cassert>
|
#include <cassert>
|
||||||
#include <string.h>
|
#include <cstring>
|
||||||
#include <unistd.h>
|
#include <unistd.h>
|
||||||
#include <sys/select.h>
|
#include <sys/select.h>
|
||||||
#include <sys/time.h>
|
#include <sys/time.h>
|
||||||
|
@ -378,4 +378,4 @@ void ReplxxLineReader::enableBracketedPaste()
|
|||||||
{
|
{
|
||||||
bracketed_paste_enabled = true;
|
bracketed_paste_enabled = true;
|
||||||
rx.enable_bracketed_paste();
|
rx.enable_bracketed_paste();
|
||||||
};
|
}
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
#include <base/demangle.h>
|
#include <base/demangle.h>
|
||||||
|
|
||||||
#include <stdlib.h>
|
#include <cstdlib>
|
||||||
#include <cxxabi.h>
|
#include <cxxabi.h>
|
||||||
|
|
||||||
static DemangleResult tryDemangle(const char * name, int & status)
|
static DemangleResult tryDemangle(const char * name, int & status)
|
||||||
|
@ -3,7 +3,7 @@
|
|||||||
#include <cstddef>
|
#include <cstddef>
|
||||||
#include <cstdlib>
|
#include <cstdlib>
|
||||||
#include <cstring>
|
#include <cstring>
|
||||||
#include <errno.h>
|
#include <cerrno>
|
||||||
|
|
||||||
|
|
||||||
void * mremap_fallback(
|
void * mremap_fallback(
|
||||||
|
@ -169,9 +169,9 @@ obstacle to adoption, that text has been removed.
|
|||||||
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#include <math.h>
|
#include <cmath>
|
||||||
#include <stdint.h>
|
#include <cstdint>
|
||||||
#include <stdio.h>
|
#include <cstdio>
|
||||||
|
|
||||||
double preciseExp10(double x)
|
double preciseExp10(double x)
|
||||||
{
|
{
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
#include <base/sleep.h>
|
#include <base/sleep.h>
|
||||||
|
|
||||||
#include <time.h>
|
#include <ctime>
|
||||||
#include <errno.h>
|
#include <cerrno>
|
||||||
|
|
||||||
#if defined(OS_DARWIN)
|
#if defined(OS_DARWIN)
|
||||||
#include <mach/mach.h>
|
#include <mach/mach.h>
|
||||||
|
@ -1,14 +1,19 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
#include <cstddef>
|
#include <cstddef>
|
||||||
|
|
||||||
#ifdef HAS_RESERVED_IDENTIFIER
|
|
||||||
#pragma clang diagnostic ignored "-Wreserved-identifier"
|
|
||||||
#endif
|
|
||||||
|
|
||||||
constexpr size_t KiB = 1024;
|
constexpr size_t KiB = 1024;
|
||||||
constexpr size_t MiB = 1024 * KiB;
|
constexpr size_t MiB = 1024 * KiB;
|
||||||
constexpr size_t GiB = 1024 * MiB;
|
constexpr size_t GiB = 1024 * MiB;
|
||||||
|
|
||||||
|
#ifdef HAS_RESERVED_IDENTIFIER
|
||||||
|
# pragma clang diagnostic push
|
||||||
|
# pragma clang diagnostic ignored "-Wreserved-identifier"
|
||||||
|
#endif
|
||||||
|
|
||||||
constexpr size_t operator"" _KiB(unsigned long long val) { return val * KiB; }
|
constexpr size_t operator"" _KiB(unsigned long long val) { return val * KiB; }
|
||||||
constexpr size_t operator"" _MiB(unsigned long long val) { return val * MiB; }
|
constexpr size_t operator"" _MiB(unsigned long long val) { return val * MiB; }
|
||||||
constexpr size_t operator"" _GiB(unsigned long long val) { return val * GiB; }
|
constexpr size_t operator"" _GiB(unsigned long long val) { return val * GiB; }
|
||||||
|
|
||||||
|
#ifdef HAS_RESERVED_IDENTIFIER
|
||||||
|
# pragma clang diagnostic pop
|
||||||
|
#endif
|
||||||
|
@ -5,7 +5,6 @@ if (CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64")
|
|||||||
set (ARCH_AMD64 1)
|
set (ARCH_AMD64 1)
|
||||||
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
|
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
|
||||||
set (ARCH_AARCH64 1)
|
set (ARCH_AARCH64 1)
|
||||||
set (ARCH_ARM 1)
|
|
||||||
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(ppc64le.*|PPC64LE.*)")
|
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(ppc64le.*|PPC64LE.*)")
|
||||||
set (ARCH_PPC64LE 1)
|
set (ARCH_PPC64LE 1)
|
||||||
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "riscv64")
|
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "riscv64")
|
||||||
|
@ -2,11 +2,11 @@
|
|||||||
|
|
||||||
# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
|
# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
|
||||||
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
|
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
|
||||||
SET(VERSION_REVISION 54462)
|
SET(VERSION_REVISION 54463)
|
||||||
SET(VERSION_MAJOR 22)
|
SET(VERSION_MAJOR 22)
|
||||||
SET(VERSION_MINOR 5)
|
SET(VERSION_MINOR 6)
|
||||||
SET(VERSION_PATCH 1)
|
SET(VERSION_PATCH 1)
|
||||||
SET(VERSION_GITHASH 77a82cc090dd5dba2d995946e82a12a2cadaaff3)
|
SET(VERSION_GITHASH df0cb0620985eb5ec59760cc76f7736e5b6209bb)
|
||||||
SET(VERSION_DESCRIBE v22.5.1.1-testing)
|
SET(VERSION_DESCRIBE v22.6.1.1-testing)
|
||||||
SET(VERSION_STRING 22.5.1.1)
|
SET(VERSION_STRING 22.6.1.1)
|
||||||
# end of autochange
|
# end of autochange
|
||||||
|
@ -1,56 +1,53 @@
|
|||||||
if (CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache" OR CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache")
|
# Setup integration with ccache to speed up builds, see https://ccache.dev/
|
||||||
set(COMPILER_MATCHES_CCACHE 1)
|
|
||||||
else()
|
|
||||||
set(COMPILER_MATCHES_CCACHE 0)
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if ((ENABLE_CCACHE OR NOT DEFINED ENABLE_CCACHE) AND NOT COMPILER_MATCHES_CCACHE)
|
if (CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache" OR CMAKE_C_COMPILER_LAUNCHER MATCHES "ccache")
|
||||||
find_program (CCACHE_FOUND ccache)
|
# custom compiler launcher already defined, most likely because cmake was invoked with like "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache" or
|
||||||
if (CCACHE_FOUND)
|
# via environment variable --> respect setting and trust that the launcher was specified correctly
|
||||||
set(ENABLE_CCACHE_BY_DEFAULT 1)
|
message(STATUS "Using custom C compiler launcher: ${CMAKE_C_COMPILER_LAUNCHER}")
|
||||||
else()
|
message(STATUS "Using custom C++ compiler launcher: ${CMAKE_CXX_COMPILER_LAUNCHER}")
|
||||||
set(ENABLE_CCACHE_BY_DEFAULT 0)
|
|
||||||
endif()
|
|
||||||
endif()
|
|
||||||
|
|
||||||
if (NOT CCACHE_FOUND AND NOT DEFINED ENABLE_CCACHE AND NOT COMPILER_MATCHES_CCACHE)
|
|
||||||
message(WARNING "CCache is not found. We recommend setting it up if you build ClickHouse from source often. "
|
|
||||||
"Setting it up will significantly reduce compilation time for 2nd and consequent builds")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
# https://ccache.dev/
|
|
||||||
option(ENABLE_CCACHE "Speedup re-compilations using ccache (external tool)" ${ENABLE_CCACHE_BY_DEFAULT})
|
|
||||||
|
|
||||||
if (NOT ENABLE_CCACHE)
|
|
||||||
return()
|
return()
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if (CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)
|
option(ENABLE_CCACHE "Speedup re-compilations using ccache (external tool)" ON)
|
||||||
execute_process(COMMAND ${CCACHE_FOUND} "-V" OUTPUT_VARIABLE CCACHE_VERSION)
|
|
||||||
string(REGEX REPLACE "ccache version ([0-9\\.]+).*" "\\1" CCACHE_VERSION ${CCACHE_VERSION})
|
|
||||||
|
|
||||||
if (CCACHE_VERSION VERSION_GREATER "3.2.0" OR NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
|
if (NOT ENABLE_CCACHE)
|
||||||
message(STATUS "Using ${CCACHE_FOUND} ${CCACHE_VERSION}")
|
message(STATUS "Using ccache: no (disabled via configuration)")
|
||||||
set(LAUNCHER ${CCACHE_FOUND})
|
return()
|
||||||
|
endif()
|
||||||
|
|
||||||
# debian (debhelpers) set SOURCE_DATE_EPOCH environment variable, that is
|
find_program (CCACHE_EXECUTABLE ccache)
|
||||||
# filled from the debian/changelog or current time.
|
|
||||||
#
|
|
||||||
# - 4.0+ ccache always includes this environment variable into the hash
|
|
||||||
# of the manifest, which do not allow to use previous cache,
|
|
||||||
# - 4.2+ ccache ignores SOURCE_DATE_EPOCH for every file w/o __DATE__/__TIME__
|
|
||||||
#
|
|
||||||
# Exclude SOURCE_DATE_EPOCH env for ccache versions between [4.0, 4.2).
|
|
||||||
if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0" AND CCACHE_VERSION VERSION_LESS "4.2")
|
|
||||||
message(STATUS "Ignore SOURCE_DATE_EPOCH for ccache")
|
|
||||||
set(LAUNCHER env -u SOURCE_DATE_EPOCH ${CCACHE_FOUND})
|
|
||||||
endif()
|
|
||||||
|
|
||||||
set (CMAKE_CXX_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_CXX_COMPILER_LAUNCHER})
|
if (NOT CCACHE_EXECUTABLE)
|
||||||
set (CMAKE_C_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_C_COMPILER_LAUNCHER})
|
message(${RECONFIGURE_MESSAGE_LEVEL} "Using ccache: no (Could not find find ccache. To significantly reduce compile times for the 2nd, 3rd, etc. build, it is highly recommended to install ccache. To suppress this message, run cmake with -DENABLE_CCACHE=0)")
|
||||||
else ()
|
return()
|
||||||
message(${RECONFIGURE_MESSAGE_LEVEL} "Not using ${CCACHE_FOUND} ${CCACHE_VERSION} bug: https://bugzilla.samba.org/show_bug.cgi?id=8118")
|
endif()
|
||||||
endif ()
|
|
||||||
elseif (NOT CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)
|
execute_process(COMMAND ${CCACHE_EXECUTABLE} "-V" OUTPUT_VARIABLE CCACHE_VERSION)
|
||||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use ccache")
|
string(REGEX REPLACE "ccache version ([0-9\\.]+).*" "\\1" CCACHE_VERSION ${CCACHE_VERSION})
|
||||||
endif ()
|
|
||||||
|
set (CCACHE_MINIMUM_VERSION 3.3)
|
||||||
|
|
||||||
|
if (CCACHE_VERSION VERSION_LESS_EQUAL ${CCACHE_MINIMUM_VERSION})
|
||||||
|
message(${RECONFIGURE_MESSAGE_LEVEL} "Using ccache: no (found ${CCACHE_EXECUTABLE} (version ${CCACHE_VERSION}), the minimum required version is ${CCACHE_MINIMUM_VERSION}")
|
||||||
|
return()
|
||||||
|
endif()
|
||||||
|
|
||||||
|
message(STATUS "Using ccache: ${CCACHE_EXECUTABLE} (version ${CCACHE_VERSION})")
|
||||||
|
set(LAUNCHER ${CCACHE_EXECUTABLE})
|
||||||
|
|
||||||
|
# Work around a well-intended but unfortunate behavior of ccache 4.0 & 4.1 with
|
||||||
|
# environment variable SOURCE_DATE_EPOCH. This variable provides an alternative
|
||||||
|
# to source-code embedded timestamps (__DATE__/__TIME__) and therefore helps with
|
||||||
|
# reproducible builds (*). SOURCE_DATE_EPOCH is set automatically by the
|
||||||
|
# distribution, e.g. Debian. Ccache 4.0 & 4.1 incorporate SOURCE_DATE_EPOCH into
|
||||||
|
# the hash calculation regardless they contain timestamps or not. This invalidates
|
||||||
|
# the cache whenever SOURCE_DATE_EPOCH changes. As a fix, ignore SOURCE_DATE_EPOCH.
|
||||||
|
#
|
||||||
|
# (*) https://reproducible-builds.org/specs/source-date-epoch/
|
||||||
|
if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0" AND CCACHE_VERSION VERSION_LESS "4.2")
|
||||||
|
message(STATUS "Ignore SOURCE_DATE_EPOCH for ccache 4.0 / 4.1")
|
||||||
|
set(LAUNCHER env -u SOURCE_DATE_EPOCH ${CCACHE_EXECUTABLE})
|
||||||
|
endif()
|
||||||
|
|
||||||
|
set (CMAKE_CXX_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_CXX_COMPILER_LAUNCHER})
|
||||||
|
set (CMAKE_C_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_C_COMPILER_LAUNCHER})
|
||||||
|
@ -3,6 +3,5 @@ include (CheckCCompilerFlag)
|
|||||||
|
|
||||||
check_cxx_compiler_flag("-Wreserved-identifier" HAS_RESERVED_IDENTIFIER)
|
check_cxx_compiler_flag("-Wreserved-identifier" HAS_RESERVED_IDENTIFIER)
|
||||||
check_cxx_compiler_flag("-Wsuggest-destructor-override" HAS_SUGGEST_DESTRUCTOR_OVERRIDE)
|
check_cxx_compiler_flag("-Wsuggest-destructor-override" HAS_SUGGEST_DESTRUCTOR_OVERRIDE)
|
||||||
check_cxx_compiler_flag("-Wshadow" HAS_SHADOW)
|
|
||||||
check_cxx_compiler_flag("-Wsuggest-override" HAS_SUGGEST_OVERRIDE)
|
check_cxx_compiler_flag("-Wsuggest-override" HAS_SUGGEST_OVERRIDE)
|
||||||
check_cxx_compiler_flag("-Xclang -fuse-ctor-homing" HAS_USE_CTOR_HOMING)
|
check_cxx_compiler_flag("-Xclang -fuse-ctor-homing" HAS_USE_CTOR_HOMING)
|
||||||
|
@ -31,7 +31,11 @@ if (ARCH_NATIVE)
|
|||||||
elseif (ARCH_AARCH64)
|
elseif (ARCH_AARCH64)
|
||||||
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=armv8-a+crc")
|
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=armv8-a+crc")
|
||||||
|
|
||||||
else ()
|
elseif (ARCH_PPC64LE)
|
||||||
|
# Note that gcc and clang have support for x86 SSE2 intrinsics when building for PowerPC
|
||||||
|
set (COMPILER_FLAGS "${COMPILER_FLAGS} -maltivec -mcpu=power8 -D__SSE2__=1 -DNO_WARN_X86_INTRINSICS")
|
||||||
|
|
||||||
|
elseif (ARCH_AMD64)
|
||||||
set (TEST_FLAG "-mssse3")
|
set (TEST_FLAG "-mssse3")
|
||||||
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
|
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
|
||||||
check_cxx_source_compiles("
|
check_cxx_source_compiles("
|
||||||
@ -60,10 +64,6 @@ else ()
|
|||||||
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
|
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (ARCH_PPC64LE)
|
|
||||||
set (COMPILER_FLAGS "${COMPILER_FLAGS} -maltivec -mcpu=power8 -D__SSE2__=1 -DNO_WARN_X86_INTRINSICS")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
set (TEST_FLAG "-msse4.2")
|
set (TEST_FLAG "-msse4.2")
|
||||||
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
|
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
|
||||||
check_cxx_source_compiles("
|
check_cxx_source_compiles("
|
||||||
@ -93,7 +93,6 @@ else ()
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
set (TEST_FLAG "-mpopcnt")
|
set (TEST_FLAG "-mpopcnt")
|
||||||
|
|
||||||
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
|
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
|
||||||
check_cxx_source_compiles("
|
check_cxx_source_compiles("
|
||||||
int main() {
|
int main() {
|
||||||
@ -186,6 +185,8 @@ else ()
|
|||||||
set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mavx512vl -mprefer-vector-width=256")
|
set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mavx512vl -mprefer-vector-width=256")
|
||||||
endif ()
|
endif ()
|
||||||
endif ()
|
endif ()
|
||||||
|
else ()
|
||||||
|
# RISC-V + exotic platforms
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
cmake_pop_check_state ()
|
cmake_pop_check_state ()
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
macro(add_glob cur_list)
|
macro(add_glob cur_list)
|
||||||
file(GLOB __tmp RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${ARGN})
|
file(GLOB __tmp CONFIGURE_DEPENDS RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${ARGN})
|
||||||
list(APPEND ${cur_list} ${__tmp})
|
list(APPEND ${cur_list} ${__tmp})
|
||||||
endmacro()
|
endmacro()
|
||||||
|
|
||||||
|
@ -1,5 +0,0 @@
|
|||||||
function(generate_code TEMPLATE_FILE)
|
|
||||||
foreach(NAME IN LISTS ARGN)
|
|
||||||
configure_file (${TEMPLATE_FILE}.cpp.in ${CMAKE_CURRENT_BINARY_DIR}/generated/${TEMPLATE_FILE}_${NAME}.cpp)
|
|
||||||
endforeach()
|
|
||||||
endfunction()
|
|
@ -1,17 +1,22 @@
|
|||||||
# Print the status of the git repository (if git is available).
|
# Print the status of the git repository (if git is available).
|
||||||
# This is useful for troubleshooting build failure reports
|
# This is useful for troubleshooting build failure reports
|
||||||
|
|
||||||
find_package(Git)
|
find_package(Git)
|
||||||
|
|
||||||
if (Git_FOUND)
|
if (Git_FOUND)
|
||||||
|
|
||||||
execute_process(
|
execute_process(
|
||||||
COMMAND ${GIT_EXECUTABLE} rev-parse HEAD
|
COMMAND ${GIT_EXECUTABLE} rev-parse HEAD
|
||||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||||
OUTPUT_VARIABLE GIT_COMMIT_ID
|
OUTPUT_VARIABLE GIT_COMMIT_ID
|
||||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||||
|
|
||||||
message(STATUS "HEAD's commit hash ${GIT_COMMIT_ID}")
|
message(STATUS "HEAD's commit hash ${GIT_COMMIT_ID}")
|
||||||
|
|
||||||
execute_process(
|
execute_process(
|
||||||
COMMAND ${GIT_EXECUTABLE} status
|
COMMAND ${GIT_EXECUTABLE} status
|
||||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR})
|
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||||
|
|
||||||
else()
|
else()
|
||||||
message(STATUS "The git program could not be found.")
|
message(STATUS "Git could not be found.")
|
||||||
endif()
|
endif()
|
||||||
|
@ -27,7 +27,7 @@ macro(clickhouse_strip_binary)
|
|||||||
)
|
)
|
||||||
|
|
||||||
install(PROGRAMS ${STRIP_DESTINATION_DIR}/bin/${STRIP_TARGET} DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
install(PROGRAMS ${STRIP_DESTINATION_DIR}/bin/${STRIP_TARGET} DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||||
install(FILES ${STRIP_DESTINATION_DIR}/lib/debug/bin/${STRIP_TARGET}.debug DESTINATION ${CMAKE_INSTALL_LIBDIR}/debug/${CMAKE_INSTALL_FULL_BINDIR}/${STRIP_TARGET}.debug COMPONENT clickhouse)
|
install(FILES ${STRIP_DESTINATION_DIR}/lib/debug/bin/${STRIP_TARGET}.debug DESTINATION ${CMAKE_INSTALL_LIBDIR}/debug/${CMAKE_INSTALL_FULL_BINDIR} COMPONENT clickhouse)
|
||||||
endmacro()
|
endmacro()
|
||||||
|
|
||||||
|
|
||||||
|
@ -15,6 +15,8 @@ elseif (CMAKE_SYSTEM_NAME MATCHES "Darwin")
|
|||||||
elseif (CMAKE_SYSTEM_NAME MATCHES "SunOS")
|
elseif (CMAKE_SYSTEM_NAME MATCHES "SunOS")
|
||||||
set (OS_SUNOS 1)
|
set (OS_SUNOS 1)
|
||||||
add_definitions(-D OS_SUNOS)
|
add_definitions(-D OS_SUNOS)
|
||||||
|
else ()
|
||||||
|
message (FATAL_ERROR "Platform ${CMAKE_SYSTEM_NAME} is not supported")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (CMAKE_CROSSCOMPILING)
|
if (CMAKE_CROSSCOMPILING)
|
||||||
|
@ -6,67 +6,65 @@ elseif (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
|||||||
set (COMPILER_CLANG 1) # Safe to treat AppleClang as a regular Clang, in general.
|
set (COMPILER_CLANG 1) # Safe to treat AppleClang as a regular Clang, in general.
|
||||||
elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||||
set (COMPILER_CLANG 1)
|
set (COMPILER_CLANG 1)
|
||||||
|
else ()
|
||||||
|
message (FATAL_ERROR "Compiler ${CMAKE_CXX_COMPILER_ID} is not supported")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version)
|
# Print details to output
|
||||||
|
execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE COMPILER_SELF_IDENTIFICATION OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||||
|
message (STATUS "Using compiler:\n${COMPILER_SELF_IDENTIFICATION}")
|
||||||
|
|
||||||
|
# Require minimum compiler versions
|
||||||
|
set (CLANG_MINIMUM_VERSION 12)
|
||||||
|
set (XCODE_MINIMUM_VERSION 12.0)
|
||||||
|
set (APPLE_CLANG_MINIMUM_VERSION 12.0.0)
|
||||||
|
set (GCC_MINIMUM_VERSION 11)
|
||||||
|
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
# Require minimum version of gcc
|
|
||||||
set (GCC_MINIMUM_VERSION 11)
|
|
||||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${GCC_MINIMUM_VERSION})
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${GCC_MINIMUM_VERSION})
|
||||||
message (FATAL_ERROR "GCC version must be at least ${GCC_MINIMUM_VERSION}. For example, if GCC ${GCC_MINIMUM_VERSION} is available under gcc-${GCC_MINIMUM_VERSION}, g++-${GCC_MINIMUM_VERSION} names, do the following: export CC=gcc-${GCC_MINIMUM_VERSION} CXX=g++-${GCC_MINIMUM_VERSION}; rm -rf CMakeCache.txt CMakeFiles; and re run cmake or ./release.")
|
message (FATAL_ERROR "Compilation with GCC version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${GCC_MINIMUM_VERSION}.")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
message (WARNING "GCC compiler is not officially supported for ClickHouse. You should migrate to clang.")
|
message (WARNING "Compilation with GCC is unsupported. Please use Clang instead.")
|
||||||
|
|
||||||
elseif (COMPILER_CLANG)
|
elseif (COMPILER_CLANG)
|
||||||
# Require minimum version of clang/apple-clang
|
|
||||||
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
||||||
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
|
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
|
||||||
if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG})
|
if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG})
|
||||||
message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew. See the instruction: https://clickhouse.com/docs/en/development/build-osx/")
|
message (FATAL_ERROR "Compilation with AppleClang is unsupported. Please use vanilla Clang, e.g. from Homebrew.")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
# AppleClang 10.0.1 (Xcode 10.2) corresponds to LLVM/Clang upstream version 7.0.0
|
# For a mapping between XCode / AppleClang / vanilla Clang versions, see https://en.wikipedia.org/wiki/Xcode
|
||||||
# AppleClang 11.0.0 (Xcode 11.0) corresponds to LLVM/Clang upstream version 8.0.0
|
|
||||||
set (XCODE_MINIMUM_VERSION 10.2)
|
|
||||||
set (APPLE_CLANG_MINIMUM_VERSION 10.0.1)
|
|
||||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${APPLE_CLANG_MINIMUM_VERSION})
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${APPLE_CLANG_MINIMUM_VERSION})
|
||||||
message (FATAL_ERROR "AppleClang compiler version must be at least ${APPLE_CLANG_MINIMUM_VERSION} (Xcode ${XCODE_MINIMUM_VERSION}).")
|
message (FATAL_ERROR "Compilation with AppleClang version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${APPLE_CLANG_MINIMUM_VERSION} (Xcode ${XCODE_MINIMUM_VERSION}).")
|
||||||
elseif (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 11.0.0)
|
|
||||||
# char8_t is available starting (upstream vanilla) Clang 7, but prior to Clang 8,
|
|
||||||
# it is not enabled by -std=c++20 and can be enabled with an explicit -fchar8_t.
|
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fchar8_t")
|
|
||||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fchar8_t")
|
|
||||||
endif ()
|
endif ()
|
||||||
else ()
|
else ()
|
||||||
set (CLANG_MINIMUM_VERSION 12)
|
|
||||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${CLANG_MINIMUM_VERSION})
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${CLANG_MINIMUM_VERSION})
|
||||||
message (FATAL_ERROR "Clang version must be at least ${CLANG_MINIMUM_VERSION}.")
|
message (FATAL_ERROR "Compilation with Clang version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${CLANG_MINIMUM_VERSION}.")
|
||||||
endif ()
|
endif ()
|
||||||
endif ()
|
endif ()
|
||||||
else ()
|
|
||||||
message (WARNING "You are using an unsupported compiler. Compilation has only been tested with Clang and GCC.")
|
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
# Linker
|
||||||
|
|
||||||
string (REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
|
string (REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
|
||||||
list (GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)
|
list (GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)
|
||||||
|
|
||||||
# Linker
|
|
||||||
|
|
||||||
# Example values: `lld-10`, `gold`.
|
# Example values: `lld-10`, `gold`.
|
||||||
option (LINKER_NAME "Linker name or full path")
|
option (LINKER_NAME "Linker name or full path")
|
||||||
|
|
||||||
if (COMPILER_GCC AND NOT LINKER_NAME)
|
if (NOT LINKER_NAME)
|
||||||
find_program (LLD_PATH NAMES "ld.lld")
|
if (COMPILER_GCC)
|
||||||
find_program (GOLD_PATH NAMES "ld.gold")
|
find_program (LLD_PATH NAMES "ld.lld")
|
||||||
elseif (NOT LINKER_NAME)
|
find_program (GOLD_PATH NAMES "ld.gold")
|
||||||
find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "lld-${COMPILER_VERSION_MAJOR}" "ld.lld" "lld")
|
elseif (COMPILER_CLANG)
|
||||||
find_program (GOLD_PATH NAMES "ld.gold" "gold")
|
find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "lld-${COMPILER_VERSION_MAJOR}" "ld.lld" "lld")
|
||||||
endif ()
|
find_program (GOLD_PATH NAMES "ld.gold" "gold")
|
||||||
|
endif ()
|
||||||
|
endif()
|
||||||
|
|
||||||
if (OS_LINUX AND NOT LINKER_NAME)
|
if (OS_LINUX AND NOT LINKER_NAME)
|
||||||
# We prefer LLD linker over Gold or BFD on Linux.
|
# prefer lld linker over gold or ld on linux
|
||||||
if (LLD_PATH)
|
if (LLD_PATH)
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
# GCC driver requires one of supported linker names like "lld".
|
# GCC driver requires one of supported linker names like "lld".
|
||||||
@ -87,9 +85,10 @@ if (OS_LINUX AND NOT LINKER_NAME)
|
|||||||
endif ()
|
endif ()
|
||||||
endif ()
|
endif ()
|
||||||
endif ()
|
endif ()
|
||||||
|
# TODO: allow different linker on != OS_LINUX
|
||||||
|
|
||||||
if (LINKER_NAME)
|
if (LINKER_NAME)
|
||||||
if (COMPILER_CLANG AND (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 12.0.0 OR CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL 12.0.0))
|
if (COMPILER_CLANG)
|
||||||
find_program (LLD_PATH NAMES ${LINKER_NAME})
|
find_program (LLD_PATH NAMES ${LINKER_NAME})
|
||||||
if (NOT LLD_PATH)
|
if (NOT LLD_PATH)
|
||||||
message (FATAL_ERROR "Using linker ${LINKER_NAME} but can't find its path.")
|
message (FATAL_ERROR "Using linker ${LINKER_NAME} but can't find its path.")
|
||||||
@ -101,9 +100,14 @@ if (LINKER_NAME)
|
|||||||
set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=${LINKER_NAME}")
|
set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=${LINKER_NAME}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
message(STATUS "Using custom linker by name: ${LINKER_NAME}")
|
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
if (LINKER_NAME)
|
||||||
|
message(STATUS "Using linker: ${LINKER_NAME}")
|
||||||
|
else()
|
||||||
|
message(STATUS "Using linker: <default>")
|
||||||
|
endif()
|
||||||
|
|
||||||
# Archiver
|
# Archiver
|
||||||
|
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
@ -116,6 +120,8 @@ if (LLVM_AR_PATH)
|
|||||||
set (CMAKE_AR "${LLVM_AR_PATH}")
|
set (CMAKE_AR "${LLVM_AR_PATH}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
message(STATUS "Using archiver: ${CMAKE_AR}")
|
||||||
|
|
||||||
# Ranlib
|
# Ranlib
|
||||||
|
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
@ -128,6 +134,8 @@ if (LLVM_RANLIB_PATH)
|
|||||||
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}")
|
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
message(STATUS "Using ranlib: ${CMAKE_RANLIB}")
|
||||||
|
|
||||||
# Install Name Tool
|
# Install Name Tool
|
||||||
|
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
@ -140,6 +148,8 @@ if (LLVM_INSTALL_NAME_TOOL_PATH)
|
|||||||
set (CMAKE_INSTALL_NAME_TOOL "${LLVM_INSTALL_NAME_TOOL_PATH}")
|
set (CMAKE_INSTALL_NAME_TOOL "${LLVM_INSTALL_NAME_TOOL_PATH}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
message(STATUS "Using install-name-tool: ${CMAKE_INSTALL_NAME_TOOL}")
|
||||||
|
|
||||||
# Objcopy
|
# Objcopy
|
||||||
|
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
@ -148,29 +158,13 @@ else ()
|
|||||||
find_program (OBJCOPY_PATH NAMES "llvm-objcopy-${COMPILER_VERSION_MAJOR}" "llvm-objcopy" "objcopy")
|
find_program (OBJCOPY_PATH NAMES "llvm-objcopy-${COMPILER_VERSION_MAJOR}" "llvm-objcopy" "objcopy")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (NOT OBJCOPY_PATH AND OS_DARWIN)
|
|
||||||
find_program (BREW_PATH NAMES "brew")
|
|
||||||
if (BREW_PATH)
|
|
||||||
execute_process (COMMAND ${BREW_PATH} --prefix llvm ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE LLVM_PREFIX)
|
|
||||||
if (LLVM_PREFIX)
|
|
||||||
find_program (OBJCOPY_PATH NAMES "llvm-objcopy" PATHS "${LLVM_PREFIX}/bin" NO_DEFAULT_PATH)
|
|
||||||
endif ()
|
|
||||||
if (NOT OBJCOPY_PATH)
|
|
||||||
execute_process (COMMAND ${BREW_PATH} --prefix binutils ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE BINUTILS_PREFIX)
|
|
||||||
if (BINUTILS_PREFIX)
|
|
||||||
find_program (OBJCOPY_PATH NAMES "objcopy" PATHS "${BINUTILS_PREFIX}/bin" NO_DEFAULT_PATH)
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (OBJCOPY_PATH)
|
if (OBJCOPY_PATH)
|
||||||
message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
|
message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
|
||||||
else ()
|
else ()
|
||||||
message (FATAL_ERROR "Cannot find objcopy.")
|
message (FATAL_ERROR "Cannot find objcopy.")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
# Strip (FIXME copypaste)
|
# Strip
|
||||||
|
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
find_program (STRIP_PATH NAMES "llvm-strip" "llvm-strip-13" "llvm-strip-12" "llvm-strip-11" "strip")
|
find_program (STRIP_PATH NAMES "llvm-strip" "llvm-strip-13" "llvm-strip-12" "llvm-strip-11" "strip")
|
||||||
@ -178,22 +172,6 @@ else ()
|
|||||||
find_program (STRIP_PATH NAMES "llvm-strip-${COMPILER_VERSION_MAJOR}" "llvm-strip" "strip")
|
find_program (STRIP_PATH NAMES "llvm-strip-${COMPILER_VERSION_MAJOR}" "llvm-strip" "strip")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (NOT STRIP_PATH AND OS_DARWIN)
|
|
||||||
find_program (BREW_PATH NAMES "brew")
|
|
||||||
if (BREW_PATH)
|
|
||||||
execute_process (COMMAND ${BREW_PATH} --prefix llvm ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE LLVM_PREFIX)
|
|
||||||
if (LLVM_PREFIX)
|
|
||||||
find_program (STRIP_PATH NAMES "llvm-strip" PATHS "${LLVM_PREFIX}/bin" NO_DEFAULT_PATH)
|
|
||||||
endif ()
|
|
||||||
if (NOT STRIP_PATH)
|
|
||||||
execute_process (COMMAND ${BREW_PATH} --prefix binutils ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE BINUTILS_PREFIX)
|
|
||||||
if (BINUTILS_PREFIX)
|
|
||||||
find_program (STRIP_PATH NAMES "strip" PATHS "${BINUTILS_PREFIX}/bin" NO_DEFAULT_PATH)
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (STRIP_PATH)
|
if (STRIP_PATH)
|
||||||
message (STATUS "Using strip: ${STRIP_PATH}")
|
message (STATUS "Using strip: ${STRIP_PATH}")
|
||||||
else ()
|
else ()
|
||||||
|
@ -7,12 +7,7 @@
|
|||||||
# - sometimes warnings from 3rd party libraries may come from macro substitutions in our code
|
# - sometimes warnings from 3rd party libraries may come from macro substitutions in our code
|
||||||
# and we have to wrap them with #pragma GCC/clang diagnostic ignored
|
# and we have to wrap them with #pragma GCC/clang diagnostic ignored
|
||||||
|
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
|
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra")
|
||||||
|
|
||||||
# Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
|
|
||||||
# Intended for exploration of new compiler warnings that may be found useful.
|
|
||||||
# Applies to clang only
|
|
||||||
option (WEVERYTHING "Enable -Weverything option with some exceptions." ON)
|
|
||||||
|
|
||||||
# Control maximum size of stack frames. It can be important if the code is run in fibers with small stack size.
|
# Control maximum size of stack frames. It can be important if the code is run in fibers with small stack size.
|
||||||
# Only in release build because debug has too large stack frames.
|
# Only in release build because debug has too large stack frames.
|
||||||
@ -21,80 +16,42 @@ if ((NOT CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG") AND (NOT SANITIZE) AND (NOT CMAKE
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
if (COMPILER_CLANG)
|
||||||
|
# Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
|
||||||
|
# We want to get everything out of the compiler for code quality.
|
||||||
|
add_warning(everything)
|
||||||
|
|
||||||
add_warning(pedantic)
|
add_warning(pedantic)
|
||||||
no_warning(vla-extension)
|
no_warning(vla-extension)
|
||||||
no_warning(zero-length-array)
|
no_warning(zero-length-array)
|
||||||
no_warning(c11-extensions)
|
no_warning(c11-extensions)
|
||||||
|
no_warning(unused-command-line-argument)
|
||||||
if (WEVERYTHING)
|
no_warning(c++98-compat-pedantic)
|
||||||
add_warning(everything)
|
no_warning(c++98-compat)
|
||||||
no_warning(c++98-compat-pedantic)
|
no_warning(c99-extensions)
|
||||||
no_warning(c++98-compat)
|
no_warning(conversion)
|
||||||
no_warning(c99-extensions)
|
no_warning(ctad-maybe-unsupported) # clang 9+, linux-only
|
||||||
no_warning(conversion)
|
no_warning(deprecated-dynamic-exception-spec)
|
||||||
no_warning(ctad-maybe-unsupported) # clang 9+, linux-only
|
no_warning(disabled-macro-expansion)
|
||||||
no_warning(deprecated-dynamic-exception-spec)
|
no_warning(documentation-unknown-command)
|
||||||
no_warning(disabled-macro-expansion)
|
no_warning(double-promotion)
|
||||||
no_warning(documentation-unknown-command)
|
no_warning(exit-time-destructors)
|
||||||
no_warning(double-promotion)
|
no_warning(float-equal)
|
||||||
no_warning(exit-time-destructors)
|
no_warning(global-constructors)
|
||||||
no_warning(float-equal)
|
no_warning(missing-prototypes)
|
||||||
no_warning(global-constructors)
|
no_warning(missing-variable-declarations)
|
||||||
no_warning(missing-prototypes)
|
no_warning(nested-anon-types)
|
||||||
no_warning(missing-variable-declarations)
|
no_warning(packed)
|
||||||
no_warning(nested-anon-types)
|
no_warning(padded)
|
||||||
no_warning(packed)
|
no_warning(return-std-move-in-c++11) # clang 7+
|
||||||
no_warning(padded)
|
no_warning(shift-sign-overflow)
|
||||||
no_warning(return-std-move-in-c++11) # clang 7+
|
no_warning(sign-conversion)
|
||||||
no_warning(shift-sign-overflow)
|
no_warning(switch-enum)
|
||||||
no_warning(sign-conversion)
|
no_warning(undefined-func-template)
|
||||||
no_warning(switch-enum)
|
no_warning(unused-template)
|
||||||
no_warning(undefined-func-template)
|
no_warning(vla)
|
||||||
no_warning(unused-template)
|
no_warning(weak-template-vtables)
|
||||||
no_warning(vla)
|
no_warning(weak-vtables)
|
||||||
no_warning(weak-template-vtables)
|
# TODO Enable conversion, sign-conversion, double-promotion warnings.
|
||||||
no_warning(weak-vtables)
|
|
||||||
|
|
||||||
# TODO Enable conversion, sign-conversion, double-promotion warnings.
|
|
||||||
else ()
|
|
||||||
add_warning(comma)
|
|
||||||
add_warning(conditional-uninitialized)
|
|
||||||
add_warning(covered-switch-default)
|
|
||||||
add_warning(deprecated)
|
|
||||||
add_warning(embedded-directive)
|
|
||||||
add_warning(empty-init-stmt) # linux-only
|
|
||||||
add_warning(extra-semi-stmt) # linux-only
|
|
||||||
add_warning(extra-semi)
|
|
||||||
add_warning(gnu-case-range)
|
|
||||||
add_warning(inconsistent-missing-destructor-override)
|
|
||||||
add_warning(newline-eof)
|
|
||||||
add_warning(old-style-cast)
|
|
||||||
add_warning(range-loop-analysis)
|
|
||||||
add_warning(redundant-parens)
|
|
||||||
add_warning(reserved-id-macro)
|
|
||||||
add_warning(shadow-field)
|
|
||||||
add_warning(shadow-uncaptured-local)
|
|
||||||
add_warning(shadow)
|
|
||||||
add_warning(string-plus-int)
|
|
||||||
add_warning(undef)
|
|
||||||
add_warning(unreachable-code-return)
|
|
||||||
add_warning(unreachable-code)
|
|
||||||
add_warning(unused-exception-parameter)
|
|
||||||
add_warning(unused-macros)
|
|
||||||
add_warning(unused-member-function)
|
|
||||||
add_warning(unneeded-internal-declaration)
|
|
||||||
add_warning(implicit-int-float-conversion)
|
|
||||||
add_warning(no-delete-null-pointer-checks)
|
|
||||||
add_warning(anon-enum-enum-conversion)
|
|
||||||
add_warning(assign-enum)
|
|
||||||
add_warning(bitwise-op-parentheses)
|
|
||||||
add_warning(int-in-bool-context)
|
|
||||||
add_warning(sometimes-uninitialized)
|
|
||||||
add_warning(tautological-bitwise-compare)
|
|
||||||
|
|
||||||
# XXX: libstdc++ has some of these for 3way compare
|
|
||||||
add_warning(zero-as-null-pointer-constant)
|
|
||||||
endif ()
|
|
||||||
elseif (COMPILER_GCC)
|
elseif (COMPILER_GCC)
|
||||||
# Add compiler options only to c++ compiler
|
# Add compiler options only to c++ compiler
|
||||||
function(add_cxx_compile_options option)
|
function(add_cxx_compile_options option)
|
||||||
|
2
contrib/CMakeLists.txt
vendored
2
contrib/CMakeLists.txt
vendored
@ -140,6 +140,7 @@ add_contrib (libpq-cmake libpq)
|
|||||||
add_contrib (nuraft-cmake NuRaft)
|
add_contrib (nuraft-cmake NuRaft)
|
||||||
add_contrib (fast_float-cmake fast_float)
|
add_contrib (fast_float-cmake fast_float)
|
||||||
add_contrib (datasketches-cpp-cmake datasketches-cpp)
|
add_contrib (datasketches-cpp-cmake datasketches-cpp)
|
||||||
|
add_contrib (hashidsxx-cmake hashidsxx)
|
||||||
|
|
||||||
option(ENABLE_NLP "Enable NLP functions support" ${ENABLE_LIBRARIES})
|
option(ENABLE_NLP "Enable NLP functions support" ${ENABLE_LIBRARIES})
|
||||||
if (ENABLE_NLP)
|
if (ENABLE_NLP)
|
||||||
@ -152,6 +153,7 @@ endif()
|
|||||||
|
|
||||||
add_contrib (sqlite-cmake sqlite-amalgamation)
|
add_contrib (sqlite-cmake sqlite-amalgamation)
|
||||||
add_contrib (s2geometry-cmake s2geometry)
|
add_contrib (s2geometry-cmake s2geometry)
|
||||||
|
add_contrib (eigen-cmake eigen)
|
||||||
|
|
||||||
# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
|
# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
|
||||||
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
|
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
if(ARCH_AMD64 OR ARCH_ARM)
|
if(ARCH_AMD64 OR ARCH_AARCH64)
|
||||||
option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES})
|
option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES})
|
||||||
elseif(ENABLE_BASE64)
|
elseif(ENABLE_BASE64)
|
||||||
message (${RECONFIGURE_MESSAGE_LEVEL} "base64 library is only supported on x86_64 and aarch64")
|
message (${RECONFIGURE_MESSAGE_LEVEL} "base64 library is only supported on x86_64 and aarch64")
|
||||||
|
@ -114,7 +114,7 @@ if (SANITIZE AND (SANITIZE STREQUAL "address" OR SANITIZE STREQUAL "thread"))
|
|||||||
"${LIBRARY_DIR}/libs/context/src/continuation.cpp"
|
"${LIBRARY_DIR}/libs/context/src/continuation.cpp"
|
||||||
)
|
)
|
||||||
endif()
|
endif()
|
||||||
if (ARCH_ARM)
|
if (ARCH_AARCH64)
|
||||||
set (SRCS_CONTEXT ${SRCS_CONTEXT}
|
set (SRCS_CONTEXT ${SRCS_CONTEXT}
|
||||||
"${LIBRARY_DIR}/libs/context/src/asm/jump_arm64_aapcs_elf_gas.S"
|
"${LIBRARY_DIR}/libs/context/src/asm/jump_arm64_aapcs_elf_gas.S"
|
||||||
"${LIBRARY_DIR}/libs/context/src/asm/make_arm64_aapcs_elf_gas.S"
|
"${LIBRARY_DIR}/libs/context/src/asm/make_arm64_aapcs_elf_gas.S"
|
||||||
|
1
contrib/eigen
vendored
Submodule
1
contrib/eigen
vendored
Submodule
@ -0,0 +1 @@
|
|||||||
|
Subproject commit 3147391d946bb4b6c68edd901f2add6ac1f31f8c
|
16
contrib/eigen-cmake/CMakeLists.txt
Normal file
16
contrib/eigen-cmake/CMakeLists.txt
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
set(EIGEN_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/eigen")
|
||||||
|
|
||||||
|
add_library (_eigen INTERFACE)
|
||||||
|
|
||||||
|
# Only include MPL2 code from Eigen library
|
||||||
|
target_compile_definitions(_eigen INTERFACE EIGEN_MPL2_ONLY)
|
||||||
|
|
||||||
|
# Clang by default mimics gcc 4.2.1 compatibility but Eigen checks __GNUC__ version to enable
|
||||||
|
# a workaround for bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=72867 fixed in 6.3
|
||||||
|
# So we fake gcc > 6.3 when building with clang
|
||||||
|
if (COMPILER_CLANG AND ARCH_PPC64LE)
|
||||||
|
target_compile_options(_eigen INTERFACE -fgnuc-version=6.4)
|
||||||
|
endif()
|
||||||
|
|
||||||
|
target_include_directories (_eigen SYSTEM INTERFACE ${EIGEN_LIBRARY_DIR})
|
||||||
|
add_library(ch_contrib::eigen ALIAS _eigen)
|
1
contrib/hashidsxx
vendored
Submodule
1
contrib/hashidsxx
vendored
Submodule
@ -0,0 +1 @@
|
|||||||
|
Subproject commit 783f6911ccfdaca83e3cfac084c4aad888a80cee
|
14
contrib/hashidsxx-cmake/CMakeLists.txt
Normal file
14
contrib/hashidsxx-cmake/CMakeLists.txt
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/hashidsxx")
|
||||||
|
|
||||||
|
set (SRCS
|
||||||
|
"${LIBRARY_DIR}/hashids.cpp"
|
||||||
|
)
|
||||||
|
|
||||||
|
set (HDRS
|
||||||
|
"${LIBRARY_DIR}/hashids.h"
|
||||||
|
)
|
||||||
|
|
||||||
|
add_library(_hashidsxx ${SRCS} ${HDRS})
|
||||||
|
target_include_directories(_hashidsxx SYSTEM PUBLIC "${LIBRARY_DIR}")
|
||||||
|
|
||||||
|
add_library(ch_contrib::hashidsxx ALIAS _hashidsxx)
|
@ -1,5 +1,5 @@
|
|||||||
if (SANITIZE OR NOT (
|
if (SANITIZE OR NOT (
|
||||||
((OS_LINUX OR OS_FREEBSD) AND (ARCH_AMD64 OR ARCH_ARM OR ARCH_PPC64LE OR ARCH_RISCV64)) OR
|
((OS_LINUX OR OS_FREEBSD) AND (ARCH_AMD64 OR ARCH_AARCH64 OR ARCH_PPC64LE OR ARCH_RISCV64)) OR
|
||||||
(OS_DARWIN AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" OR CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG"))
|
(OS_DARWIN AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" OR CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG"))
|
||||||
))
|
))
|
||||||
if (ENABLE_JEMALLOC)
|
if (ENABLE_JEMALLOC)
|
||||||
@ -141,7 +141,7 @@ if (ARCH_AMD64)
|
|||||||
else()
|
else()
|
||||||
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_x86_64")
|
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_x86_64")
|
||||||
endif()
|
endif()
|
||||||
elseif (ARCH_ARM)
|
elseif (ARCH_AARCH64)
|
||||||
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_aarch64")
|
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_aarch64")
|
||||||
elseif (ARCH_PPC64LE)
|
elseif (ARCH_PPC64LE)
|
||||||
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_ppc64le")
|
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_ppc64le")
|
||||||
@ -170,7 +170,13 @@ endif ()
|
|||||||
target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_PROF=1)
|
target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_PROF=1)
|
||||||
|
|
||||||
if (USE_UNWIND)
|
if (USE_UNWIND)
|
||||||
target_compile_definitions (_jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1)
|
# jemalloc provides support for two different libunwind flavors: the original HP libunwind and the one coming with gcc / g++ / libstdc++.
|
||||||
|
# The latter is identified by `JEMALLOC_PROF_LIBGCC` and uses `_Unwind_Backtrace` method instead of `unw_backtrace`.
|
||||||
|
# At the time ClickHouse uses LLVM libunwind which follows libgcc's way of backtracing.
|
||||||
|
|
||||||
|
# ClickHouse has to provide `unw_backtrace` method by the means of [commit 8e2b31e](https://github.com/ClickHouse/libunwind/commit/8e2b31e766dd502f6df74909e04a7dbdf5182eb1).
|
||||||
|
|
||||||
|
target_compile_definitions (_jemalloc PRIVATE -DJEMALLOC_PROF_LIBGCC=1)
|
||||||
target_link_libraries (_jemalloc PRIVATE unwind)
|
target_link_libraries (_jemalloc PRIVATE unwind)
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
@ -7,9 +7,9 @@ CHECK_FUNCTION_EXISTS(nanosleep HAVE_NANOSLEEP)
|
|||||||
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-strict-aliasing")
|
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-strict-aliasing")
|
||||||
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fno-strict-aliasing")
|
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fno-strict-aliasing")
|
||||||
|
|
||||||
IF(ENABLE_SSE STREQUAL ON AND NOT ARCH_PPC64LE AND NOT ARCH_AARCH64 AND NOT ARCH_ARM)
|
IF(ENABLE_SSE STREQUAL ON AND NOT ARCH_PPC64LE AND NOT ARCH_AARCH64 AND NOT ARCH_AARCH64)
|
||||||
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -msse4.2")
|
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -msse4.2")
|
||||||
ENDIF(ENABLE_SSE STREQUAL ON AND NOT ARCH_PPC64LE AND NOT ARCH_AARCH64 AND NOT ARCH_ARM)
|
ENDIF(ENABLE_SSE STREQUAL ON AND NOT ARCH_PPC64LE AND NOT ARCH_AARCH64 AND NOT ARCH_AARCH64)
|
||||||
|
|
||||||
IF(NOT TEST_HDFS_PREFIX)
|
IF(NOT TEST_HDFS_PREFIX)
|
||||||
SET(TEST_HDFS_PREFIX "./" CACHE STRING "default directory prefix used for test." FORCE)
|
SET(TEST_HDFS_PREFIX "./" CACHE STRING "default directory prefix used for test." FORCE)
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
if(NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE)
|
if(NOT ARCH_AARCH64 AND NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE)
|
||||||
option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES})
|
option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES})
|
||||||
elseif(ENABLE_HDFS)
|
elseif(ENABLE_HDFS)
|
||||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration")
|
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration")
|
||||||
|
2
contrib/poco
vendored
2
contrib/poco
vendored
@ -1 +1 @@
|
|||||||
Subproject commit 6c1a233744d13414e8e8db396c75177b857b2c22
|
Subproject commit de35b9fd72b57127abdc3a5beaf0e320d767e356
|
44
docker/docs/release/Dockerfile
Normal file
44
docker/docs/release/Dockerfile
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
# docker build -t clickhouse/docs-release .
|
||||||
|
FROM ubuntu:20.04
|
||||||
|
|
||||||
|
# ARG for quick switch to a given ubuntu mirror
|
||||||
|
ARG apt_archive="http://archive.ubuntu.com"
|
||||||
|
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
|
||||||
|
|
||||||
|
ENV LANG=C.UTF-8
|
||||||
|
|
||||||
|
RUN apt-get update \
|
||||||
|
&& DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \
|
||||||
|
wget \
|
||||||
|
bash \
|
||||||
|
python \
|
||||||
|
curl \
|
||||||
|
python3-requests \
|
||||||
|
sudo \
|
||||||
|
git \
|
||||||
|
openssl \
|
||||||
|
python3-pip \
|
||||||
|
software-properties-common \
|
||||||
|
fonts-arphic-ukai \
|
||||||
|
fonts-arphic-uming \
|
||||||
|
fonts-ipafont-mincho \
|
||||||
|
fonts-ipafont-gothic \
|
||||||
|
fonts-unfonts-core \
|
||||||
|
xvfb \
|
||||||
|
ssh-client \
|
||||||
|
&& apt-get autoremove --yes \
|
||||||
|
&& apt-get clean \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN pip3 install --ignore-installed --upgrade setuptools pip virtualenv
|
||||||
|
|
||||||
|
# We create the most popular default 1000:1000 ubuntu user to not have ssh issues when running with UID==1000
|
||||||
|
RUN useradd --create-home --uid 1000 --user-group ubuntu \
|
||||||
|
&& ssh-keyscan -t rsa github.com >> /etc/ssh/ssh_known_hosts
|
||||||
|
|
||||||
|
COPY run.sh /
|
||||||
|
|
||||||
|
ENV REPO_PATH=/repo_path
|
||||||
|
ENV OUTPUT_PATH=/output_path
|
||||||
|
|
||||||
|
CMD ["/bin/bash", "/run.sh"]
|
12
docker/docs/release/run.sh
Normal file
12
docker/docs/release/run.sh
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
cd "$REPO_PATH/docs/tools"
|
||||||
|
if ! [ -d venv ]; then
|
||||||
|
mkdir -p venv
|
||||||
|
virtualenv -p "$(which python3)" venv
|
||||||
|
source venv/bin/activate
|
||||||
|
python3 -m pip install --ignore-installed -r requirements.txt
|
||||||
|
fi
|
||||||
|
source venv/bin/activate
|
||||||
|
./release.sh 2>&1 | tee "$OUTPUT_PATH/output.log"
|
@ -1,8 +1,4 @@
|
|||||||
{
|
{
|
||||||
"docker/packager/deb": {
|
|
||||||
"name": "clickhouse/deb-builder",
|
|
||||||
"dependent": []
|
|
||||||
},
|
|
||||||
"docker/packager/binary": {
|
"docker/packager/binary": {
|
||||||
"name": "clickhouse/binary-builder",
|
"name": "clickhouse/binary-builder",
|
||||||
"dependent": [
|
"dependent": [
|
||||||
@ -150,5 +146,9 @@
|
|||||||
"name": "clickhouse/docs-builder",
|
"name": "clickhouse/docs-builder",
|
||||||
"dependent": [
|
"dependent": [
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
"docker/docs/release": {
|
||||||
|
"name": "clickhouse/docs-release",
|
||||||
|
"dependent": []
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -97,12 +97,15 @@ RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
|
|||||||
|
|
||||||
# Architecture of the image when BuildKit/buildx is used
|
# Architecture of the image when BuildKit/buildx is used
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
ARG NFPM_VERSION=2.15.0
|
ARG NFPM_VERSION=2.15.1
|
||||||
|
|
||||||
RUN arch=${TARGETARCH:-amd64} \
|
RUN arch=${TARGETARCH:-amd64} \
|
||||||
&& curl -Lo /tmp/nfpm.deb "https://github.com/goreleaser/nfpm/releases/download/v${NFPM_VERSION}/nfpm_${arch}.deb" \
|
&& curl -Lo /tmp/nfpm.deb "https://github.com/goreleaser/nfpm/releases/download/v${NFPM_VERSION}/nfpm_${arch}.deb" \
|
||||||
&& dpkg -i /tmp/nfpm.deb \
|
&& dpkg -i /tmp/nfpm.deb \
|
||||||
&& rm /tmp/nfpm.deb
|
&& rm /tmp/nfpm.deb
|
||||||
|
|
||||||
|
RUN mkdir /workdir && chmod 777 /workdir
|
||||||
|
WORKDIR /workdir
|
||||||
|
|
||||||
COPY build.sh /
|
COPY build.sh /
|
||||||
CMD ["bash", "-c", "/build.sh 2>&1"]
|
CMD ["bash", "-c", "/build.sh 2>&1"]
|
||||||
|
@ -1,18 +1,18 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
set -x -e
|
||||||
|
|
||||||
exec &> >(ts)
|
exec &> >(ts)
|
||||||
set -x -e
|
|
||||||
|
|
||||||
cache_status () {
|
cache_status () {
|
||||||
ccache --show-config ||:
|
ccache --show-config ||:
|
||||||
ccache --show-stats ||:
|
ccache --show-stats ||:
|
||||||
}
|
}
|
||||||
|
|
||||||
git config --global --add safe.directory /build
|
[ -O /build ] || git config --global --add safe.directory /build
|
||||||
|
|
||||||
mkdir -p build/cmake/toolchain/darwin-x86_64
|
mkdir -p /build/cmake/toolchain/darwin-x86_64
|
||||||
tar xJf MacOSX11.0.sdk.tar.xz -C build/cmake/toolchain/darwin-x86_64 --strip-components=1
|
tar xJf /MacOSX11.0.sdk.tar.xz -C /build/cmake/toolchain/darwin-x86_64 --strip-components=1
|
||||||
ln -sf darwin-x86_64 build/cmake/toolchain/darwin-aarch64
|
ln -sf darwin-x86_64 /build/cmake/toolchain/darwin-aarch64
|
||||||
|
|
||||||
# Uncomment to debug ccache. Don't put ccache log in /output right away, or it
|
# Uncomment to debug ccache. Don't put ccache log in /output right away, or it
|
||||||
# will be confusingly packed into the "performance" package.
|
# will be confusingly packed into the "performance" package.
|
||||||
@ -20,8 +20,8 @@ ln -sf darwin-x86_64 build/cmake/toolchain/darwin-aarch64
|
|||||||
# export CCACHE_DEBUG=1
|
# export CCACHE_DEBUG=1
|
||||||
|
|
||||||
|
|
||||||
mkdir -p build/build_docker
|
mkdir -p /build/build_docker
|
||||||
cd build/build_docker
|
cd /build/build_docker
|
||||||
rm -f CMakeCache.txt
|
rm -f CMakeCache.txt
|
||||||
# Read cmake arguments into array (possibly empty)
|
# Read cmake arguments into array (possibly empty)
|
||||||
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
|
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
|
||||||
@ -61,10 +61,10 @@ fi
|
|||||||
|
|
||||||
if [ "coverity" == "$COMBINED_OUTPUT" ]
|
if [ "coverity" == "$COMBINED_OUTPUT" ]
|
||||||
then
|
then
|
||||||
mkdir -p /opt/cov-analysis
|
mkdir -p /workdir/cov-analysis
|
||||||
|
|
||||||
wget --post-data "token=$COVERITY_TOKEN&project=ClickHouse%2FClickHouse" -qO- https://scan.coverity.com/download/linux64 | tar xz -C /opt/cov-analysis --strip-components 1
|
wget --post-data "token=$COVERITY_TOKEN&project=ClickHouse%2FClickHouse" -qO- https://scan.coverity.com/download/linux64 | tar xz -C /workdir/cov-analysis --strip-components 1
|
||||||
export PATH=$PATH:/opt/cov-analysis/bin
|
export PATH=$PATH:/workdir/cov-analysis/bin
|
||||||
cov-configure --config ./coverity.config --template --comptype clangcc --compiler "$CC"
|
cov-configure --config ./coverity.config --template --comptype clangcc --compiler "$CC"
|
||||||
SCAN_WRAPPER="cov-build --config ./coverity.config --dir cov-int"
|
SCAN_WRAPPER="cov-build --config ./coverity.config --dir cov-int"
|
||||||
fi
|
fi
|
||||||
@ -89,16 +89,36 @@ mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
|
|||||||
find . -name '*.so' -print -exec mv '{}' /output \;
|
find . -name '*.so' -print -exec mv '{}' /output \;
|
||||||
find . -name '*.so.*' -print -exec mv '{}' /output \;
|
find . -name '*.so.*' -print -exec mv '{}' /output \;
|
||||||
|
|
||||||
# Different files for performance test.
|
prepare_combined_output () {
|
||||||
if [ "performance" == "$COMBINED_OUTPUT" ]
|
local OUTPUT
|
||||||
then
|
OUTPUT="$1"
|
||||||
cp -r ../tests/performance /output
|
|
||||||
cp -r ../tests/config/top_level_domains /output
|
|
||||||
cp -r ../docker/test/performance-comparison/config /output ||:
|
|
||||||
rm /output/unit_tests_dbms ||:
|
|
||||||
rm /output/clickhouse-odbc-bridge ||:
|
|
||||||
|
|
||||||
cp -r ../docker/test/performance-comparison /output/scripts ||:
|
mkdir -p "$OUTPUT"/config
|
||||||
|
cp /build/programs/server/config.xml "$OUTPUT"/config
|
||||||
|
cp /build/programs/server/users.xml "$OUTPUT"/config
|
||||||
|
cp -r --dereference /build/programs/server/config.d "$OUTPUT"/config
|
||||||
|
}
|
||||||
|
|
||||||
|
# Different files for performance test.
|
||||||
|
if [ "$WITH_PERFORMANCE" == 1 ]
|
||||||
|
then
|
||||||
|
PERF_OUTPUT=/workdir/performance/output
|
||||||
|
mkdir -p "$PERF_OUTPUT"
|
||||||
|
cp -r ../tests/performance "$PERF_OUTPUT"
|
||||||
|
cp -r ../tests/config/top_level_domains "$PERF_OUTPUT"
|
||||||
|
cp -r ../docker/test/performance-comparison/config "$PERF_OUTPUT" ||:
|
||||||
|
for SRC in /output/clickhouse*; do
|
||||||
|
# Copy all clickhouse* files except packages and bridges
|
||||||
|
[[ "$SRC" != *.* ]] && [[ "$SRC" != *-bridge ]] && \
|
||||||
|
cp -d "$SRC" "$PERF_OUTPUT"
|
||||||
|
done
|
||||||
|
if [ -x "$PERF_OUTPUT"/clickhouse-keeper ]; then
|
||||||
|
# Replace standalone keeper by symlink
|
||||||
|
ln -sf clickhouse "$PERF_OUTPUT"/clickhouse-keeper
|
||||||
|
fi
|
||||||
|
|
||||||
|
cp -r ../docker/test/performance-comparison "$PERF_OUTPUT"/scripts ||:
|
||||||
|
prepare_combined_output "$PERF_OUTPUT"
|
||||||
|
|
||||||
# We have to know the revision that corresponds to this binary build.
|
# We have to know the revision that corresponds to this binary build.
|
||||||
# It is not the nominal SHA from pull/*/head, but the pull/*/merge, which is
|
# It is not the nominal SHA from pull/*/head, but the pull/*/merge, which is
|
||||||
@ -111,22 +131,23 @@ then
|
|||||||
# for a given nominal SHA, but it is not accessible outside Yandex.
|
# for a given nominal SHA, but it is not accessible outside Yandex.
|
||||||
# This is why we add this repository snapshot from CI to the performance test
|
# This is why we add this repository snapshot from CI to the performance test
|
||||||
# package.
|
# package.
|
||||||
mkdir /output/ch
|
mkdir "$PERF_OUTPUT"/ch
|
||||||
git -C /output/ch init --bare
|
git -C "$PERF_OUTPUT"/ch init --bare
|
||||||
git -C /output/ch remote add origin /build
|
git -C "$PERF_OUTPUT"/ch remote add origin /build
|
||||||
git -C /output/ch fetch --no-tags --depth 50 origin HEAD:pr
|
git -C "$PERF_OUTPUT"/ch fetch --no-tags --depth 50 origin HEAD:pr
|
||||||
git -C /output/ch fetch --no-tags --depth 50 origin master:master
|
git -C "$PERF_OUTPUT"/ch fetch --no-tags --depth 50 origin master:master
|
||||||
git -C /output/ch reset --soft pr
|
git -C "$PERF_OUTPUT"/ch reset --soft pr
|
||||||
git -C /output/ch log -5
|
git -C "$PERF_OUTPUT"/ch log -5
|
||||||
|
(
|
||||||
|
cd "$PERF_OUTPUT"/..
|
||||||
|
tar -cv -I pigz -f /output/performance.tgz output
|
||||||
|
)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# May be set for split build or for performance test.
|
# May be set for split build or for performance test.
|
||||||
if [ "" != "$COMBINED_OUTPUT" ]
|
if [ "" != "$COMBINED_OUTPUT" ]
|
||||||
then
|
then
|
||||||
mkdir -p /output/config
|
prepare_combined_output /output
|
||||||
cp ../programs/server/config.xml /output/config
|
|
||||||
cp ../programs/server/users.xml /output/config
|
|
||||||
cp -r --dereference ../programs/server/config.d /output/config
|
|
||||||
tar -cv -I pigz -f "$COMBINED_OUTPUT.tgz" /output
|
tar -cv -I pigz -f "$COMBINED_OUTPUT.tgz" /output
|
||||||
rm -r /output/*
|
rm -r /output/*
|
||||||
mv "$COMBINED_OUTPUT.tgz" /output
|
mv "$COMBINED_OUTPUT.tgz" /output
|
||||||
@ -138,13 +159,6 @@ then
|
|||||||
mv "coverity-scan.tgz" /output
|
mv "coverity-scan.tgz" /output
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Also build fuzzers if any sanitizer specified
|
|
||||||
# if [ -n "$SANITIZER" ]
|
|
||||||
# then
|
|
||||||
# # Currently we are in build/build_docker directory
|
|
||||||
# ../docker/packager/other/fuzzer.sh
|
|
||||||
# fi
|
|
||||||
|
|
||||||
cache_status
|
cache_status
|
||||||
|
|
||||||
if [ "${CCACHE_DEBUG:-}" == "1" ]
|
if [ "${CCACHE_DEBUG:-}" == "1" ]
|
||||||
@ -159,3 +173,5 @@ then
|
|||||||
# files in place, and will fail because this directory is not writable.
|
# files in place, and will fail because this directory is not writable.
|
||||||
tar -cv -I pixz -f /output/ccache.log.txz "$CCACHE_LOGFILE"
|
tar -cv -I pixz -f /output/ccache.log.txz "$CCACHE_LOGFILE"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
ls -l /output
|
||||||
|
@ -1,81 +0,0 @@
|
|||||||
# rebuild in #33610
|
|
||||||
# docker build -t clickhouse/deb-builder .
|
|
||||||
FROM ubuntu:20.04
|
|
||||||
|
|
||||||
# ARG for quick switch to a given ubuntu mirror
|
|
||||||
ARG apt_archive="http://archive.ubuntu.com"
|
|
||||||
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
|
|
||||||
|
|
||||||
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
|
|
||||||
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
|
|
||||||
--yes --no-install-recommends --verbose-versions \
|
|
||||||
&& export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \
|
|
||||||
&& wget -nv -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \
|
|
||||||
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
|
|
||||||
&& apt-key add /tmp/llvm-snapshot.gpg.key \
|
|
||||||
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
|
|
||||||
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
|
|
||||||
/etc/apt/sources.list
|
|
||||||
|
|
||||||
# initial packages
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get install \
|
|
||||||
bash \
|
|
||||||
fakeroot \
|
|
||||||
ccache \
|
|
||||||
curl \
|
|
||||||
software-properties-common \
|
|
||||||
--yes --no-install-recommends
|
|
||||||
|
|
||||||
# Architecture of the image when BuildKit/buildx is used
|
|
||||||
ARG TARGETARCH
|
|
||||||
|
|
||||||
# Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
|
|
||||||
# to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
|
|
||||||
# Significantly increase deb packaging speed and compatible with old systems
|
|
||||||
RUN arch=${TARGETARCH:-amd64} \
|
|
||||||
&& curl -Lo /usr/bin/dpkg-deb https://github.com/ClickHouse-Extras/dpkg/releases/download/1.21.1-clickhouse/dpkg-deb-${arch}
|
|
||||||
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get install \
|
|
||||||
alien \
|
|
||||||
clang-${LLVM_VERSION} \
|
|
||||||
clang-tidy-${LLVM_VERSION} \
|
|
||||||
cmake \
|
|
||||||
debhelper \
|
|
||||||
devscripts \
|
|
||||||
gdb \
|
|
||||||
git \
|
|
||||||
gperf \
|
|
||||||
lld-${LLVM_VERSION} \
|
|
||||||
llvm-${LLVM_VERSION} \
|
|
||||||
llvm-${LLVM_VERSION}-dev \
|
|
||||||
moreutils \
|
|
||||||
ninja-build \
|
|
||||||
perl \
|
|
||||||
pigz \
|
|
||||||
pixz \
|
|
||||||
pkg-config \
|
|
||||||
tzdata \
|
|
||||||
--yes --no-install-recommends
|
|
||||||
|
|
||||||
# NOTE: Seems like gcc-11 is too new for ubuntu20 repository
|
|
||||||
RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
|
|
||||||
&& apt-get update \
|
|
||||||
&& apt-get install gcc-11 g++-11 --yes
|
|
||||||
|
|
||||||
|
|
||||||
# These symlinks are required:
|
|
||||||
# /usr/bin/ld.lld: by gcc to find lld compiler
|
|
||||||
# /usr/bin/aarch64-linux-gnu-obj*: for debug symbols stripping
|
|
||||||
RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld \
|
|
||||||
&& ln -sf /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-objcopy /usr/bin/aarch64-linux-gnu-strip \
|
|
||||||
&& ln -sf /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-objcopy /usr/bin/aarch64-linux-gnu-objcopy \
|
|
||||||
&& ln -sf /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-objdump /usr/bin/aarch64-linux-gnu-objdump
|
|
||||||
|
|
||||||
|
|
||||||
COPY build.sh /
|
|
||||||
|
|
||||||
CMD ["/bin/bash", "/build.sh"]
|
|
@ -1,58 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -x -e
|
|
||||||
|
|
||||||
# Uncomment to debug ccache.
|
|
||||||
# export CCACHE_LOGFILE=/build/ccache.log
|
|
||||||
# export CCACHE_DEBUG=1
|
|
||||||
|
|
||||||
ccache --show-config ||:
|
|
||||||
ccache --show-stats ||:
|
|
||||||
ccache --zero-stats ||:
|
|
||||||
|
|
||||||
read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
|
|
||||||
build/release "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
|
|
||||||
mv /*.deb /output
|
|
||||||
mv -- *.changes /output
|
|
||||||
mv -- *.buildinfo /output
|
|
||||||
mv /*.rpm /output ||: # if exists
|
|
||||||
mv /*.tgz /output ||: # if exists
|
|
||||||
|
|
||||||
if [ -n "$BINARY_OUTPUT" ] && { [ "$BINARY_OUTPUT" = "programs" ] || [ "$BINARY_OUTPUT" = "tests" ] ;}
|
|
||||||
then
|
|
||||||
echo "Place $BINARY_OUTPUT to output"
|
|
||||||
mkdir /output/binary ||: # if exists
|
|
||||||
mv /build/obj-*/programs/clickhouse* /output/binary
|
|
||||||
|
|
||||||
if [ "$BINARY_OUTPUT" = "tests" ]
|
|
||||||
then
|
|
||||||
mv /build/obj-*/src/unit_tests_dbms /output/binary
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Also build fuzzers if any sanitizer specified
|
|
||||||
# if [ -n "$SANITIZER" ]
|
|
||||||
# then
|
|
||||||
# # Script is supposed that we are in build directory.
|
|
||||||
# mkdir -p build/build_docker
|
|
||||||
# cd build/build_docker
|
|
||||||
# # Launching build script
|
|
||||||
# ../docker/packager/other/fuzzer.sh
|
|
||||||
# cd
|
|
||||||
# fi
|
|
||||||
|
|
||||||
ccache --show-config ||:
|
|
||||||
ccache --show-stats ||:
|
|
||||||
|
|
||||||
if [ "${CCACHE_DEBUG:-}" == "1" ]
|
|
||||||
then
|
|
||||||
find /build -name '*.ccache-*' -print0 \
|
|
||||||
| tar -c -I pixz -f /output/ccache-debug.txz --null -T -
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$CCACHE_LOGFILE" ]
|
|
||||||
then
|
|
||||||
# Compress the log as well, or else the CI will try to compress all log
|
|
||||||
# files in place, and will fail because this directory is not writable.
|
|
||||||
tar -cv -I pixz -f /output/ccache.log.txz "$CCACHE_LOGFILE"
|
|
||||||
fi
|
|
@ -1,36 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# This script is responsible for building all fuzzers, and copy them to output directory
|
|
||||||
# as an archive.
|
|
||||||
# Script is supposed that we are in build directory.
|
|
||||||
|
|
||||||
set -x -e
|
|
||||||
|
|
||||||
printenv
|
|
||||||
|
|
||||||
# Delete previous cache, because we add a new flags -DENABLE_FUZZING=1 and -DFUZZER=libfuzzer
|
|
||||||
rm -f CMakeCache.txt
|
|
||||||
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
|
|
||||||
# Hope, that the most part of files will be in cache, so we just link new executables
|
|
||||||
# Please, add or change flags directly in cmake
|
|
||||||
cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_C_COMPILER="$CC" -DCMAKE_CXX_COMPILER="$CXX" \
|
|
||||||
-DSANITIZE="$SANITIZER" -DENABLE_FUZZING=1 -DFUZZER='libfuzzer' -DENABLE_PROTOBUF=1 "${CMAKE_FLAGS[@]}" ..
|
|
||||||
|
|
||||||
FUZZER_TARGETS=$(find ../src -name '*_fuzzer.cpp' -execdir basename {} .cpp ';' | tr '\n' ' ')
|
|
||||||
|
|
||||||
NUM_JOBS=$(($(nproc || grep -c ^processor /proc/cpuinfo)))
|
|
||||||
|
|
||||||
mkdir -p /output/fuzzers
|
|
||||||
for FUZZER_TARGET in $FUZZER_TARGETS
|
|
||||||
do
|
|
||||||
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
|
|
||||||
ninja $NINJA_FLAGS $FUZZER_TARGET -j $NUM_JOBS
|
|
||||||
# Find this binary in build directory and strip it
|
|
||||||
FUZZER_PATH=$(find ./src -name "$FUZZER_TARGET")
|
|
||||||
strip --strip-unneeded "$FUZZER_PATH"
|
|
||||||
mv "$FUZZER_PATH" /output/fuzzers
|
|
||||||
done
|
|
||||||
|
|
||||||
|
|
||||||
tar -zcvf /output/fuzzers.tar.gz /output/fuzzers
|
|
||||||
rm -rf /output/fuzzers
|
|
@ -5,8 +5,10 @@ import os
|
|||||||
import argparse
|
import argparse
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
from typing import List
|
||||||
|
|
||||||
SCRIPT_PATH = os.path.realpath(__file__)
|
SCRIPT_PATH = os.path.realpath(__file__)
|
||||||
|
IMAGE_TYPE = "binary"
|
||||||
|
|
||||||
|
|
||||||
def check_image_exists_locally(image_name):
|
def check_image_exists_locally(image_name):
|
||||||
@ -38,8 +40,40 @@ def build_image(image_name, filepath):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def pre_build(repo_path: str, env_variables: List[str]):
|
||||||
|
if "WITH_PERFORMANCE=1" in env_variables:
|
||||||
|
current_branch = subprocess.check_output(
|
||||||
|
"git branch --show-current", shell=True, encoding="utf-8"
|
||||||
|
).strip()
|
||||||
|
is_shallow = (
|
||||||
|
subprocess.check_output(
|
||||||
|
"git rev-parse --is-shallow-repository", shell=True, encoding="utf-8"
|
||||||
|
)
|
||||||
|
== "true\n"
|
||||||
|
)
|
||||||
|
if is_shallow:
|
||||||
|
# I've spent quite some time on looking around the problem, and my
|
||||||
|
# conclusion is: in the current state the easiest way to go is to force
|
||||||
|
# unshallow repository for performance artifacts.
|
||||||
|
# To change it we need to rework our performance tests docker image
|
||||||
|
raise Exception("shallow repository is not suitable for performance builds")
|
||||||
|
if current_branch != "master":
|
||||||
|
cmd = (
|
||||||
|
f"git -C {repo_path} fetch --no-recurse-submodules "
|
||||||
|
"--no-tags origin master:master"
|
||||||
|
)
|
||||||
|
logging.info("Getting master branch for performance artifact: ''%s'", cmd)
|
||||||
|
subprocess.check_call(cmd, shell=True)
|
||||||
|
|
||||||
|
|
||||||
def run_docker_image_with_env(
|
def run_docker_image_with_env(
|
||||||
image_name, output, env_variables, ch_root, ccache_dir, docker_image_version
|
image_name,
|
||||||
|
as_root,
|
||||||
|
output,
|
||||||
|
env_variables,
|
||||||
|
ch_root,
|
||||||
|
ccache_dir,
|
||||||
|
docker_image_version,
|
||||||
):
|
):
|
||||||
env_part = " -e ".join(env_variables)
|
env_part = " -e ".join(env_variables)
|
||||||
if env_part:
|
if env_part:
|
||||||
@ -50,8 +84,13 @@ def run_docker_image_with_env(
|
|||||||
else:
|
else:
|
||||||
interactive = ""
|
interactive = ""
|
||||||
|
|
||||||
|
if as_root:
|
||||||
|
user = "0:0"
|
||||||
|
else:
|
||||||
|
user = f"{os.geteuid()}:{os.getegid()}"
|
||||||
|
|
||||||
cmd = (
|
cmd = (
|
||||||
f"docker run --network=host --rm --volume={output}:/output "
|
f"docker run --network=host --user={user} --rm --volume={output}:/output "
|
||||||
f"--volume={ch_root}:/build --volume={ccache_dir}:/ccache {env_part} "
|
f"--volume={ch_root}:/build --volume={ccache_dir}:/ccache {env_part} "
|
||||||
f"{interactive} {image_name}:{docker_image_version}"
|
f"{interactive} {image_name}:{docker_image_version}"
|
||||||
)
|
)
|
||||||
@ -75,7 +114,6 @@ def parse_env_variables(
|
|||||||
compiler,
|
compiler,
|
||||||
sanitizer,
|
sanitizer,
|
||||||
package_type,
|
package_type,
|
||||||
image_type,
|
|
||||||
cache,
|
cache,
|
||||||
distcc_hosts,
|
distcc_hosts,
|
||||||
split_binary,
|
split_binary,
|
||||||
@ -153,7 +191,7 @@ def parse_env_variables(
|
|||||||
|
|
||||||
cxx = cc.replace("gcc", "g++").replace("clang", "clang++")
|
cxx = cc.replace("gcc", "g++").replace("clang", "clang++")
|
||||||
|
|
||||||
if image_type == "deb":
|
if package_type == "deb":
|
||||||
result.append("MAKE_DEB=true")
|
result.append("MAKE_DEB=true")
|
||||||
cmake_flags.append("-DENABLE_TESTS=0")
|
cmake_flags.append("-DENABLE_TESTS=0")
|
||||||
cmake_flags.append("-DENABLE_UTILS=0")
|
cmake_flags.append("-DENABLE_UTILS=0")
|
||||||
@ -165,6 +203,7 @@ def parse_env_variables(
|
|||||||
cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var")
|
cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var")
|
||||||
if is_release_build(build_type, package_type, sanitizer, split_binary):
|
if is_release_build(build_type, package_type, sanitizer, split_binary):
|
||||||
cmake_flags.append("-DINSTALL_STRIPPED_BINARIES=ON")
|
cmake_flags.append("-DINSTALL_STRIPPED_BINARIES=ON")
|
||||||
|
result.append("WITH_PERFORMANCE=1")
|
||||||
if is_cross_arm:
|
if is_cross_arm:
|
||||||
cmake_flags.append("-DBUILD_STANDALONE_KEEPER=1")
|
cmake_flags.append("-DBUILD_STANDALONE_KEEPER=1")
|
||||||
else:
|
else:
|
||||||
@ -176,10 +215,7 @@ def parse_env_variables(
|
|||||||
cmake_flags.append(f"-DCMAKE_CXX_COMPILER={cxx}")
|
cmake_flags.append(f"-DCMAKE_CXX_COMPILER={cxx}")
|
||||||
|
|
||||||
# Create combined output archive for split build and for performance tests.
|
# Create combined output archive for split build and for performance tests.
|
||||||
if package_type == "performance":
|
if package_type == "coverity":
|
||||||
result.append("COMBINED_OUTPUT=performance")
|
|
||||||
cmake_flags.append("-DENABLE_TESTS=0")
|
|
||||||
elif package_type == "coverity":
|
|
||||||
result.append("COMBINED_OUTPUT=coverity")
|
result.append("COMBINED_OUTPUT=coverity")
|
||||||
result.append('COVERITY_TOKEN="$COVERITY_TOKEN"')
|
result.append('COVERITY_TOKEN="$COVERITY_TOKEN"')
|
||||||
elif split_binary:
|
elif split_binary:
|
||||||
@ -258,27 +294,30 @@ def parse_env_variables(
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def dir_name(name: str) -> str:
|
||||||
|
if not os.path.isabs(name):
|
||||||
|
name = os.path.abspath(os.path.join(os.getcwd(), name))
|
||||||
|
return name
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
|
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
|
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
|
||||||
description="ClickHouse building script using prebuilt Docker image",
|
description="ClickHouse building script using prebuilt Docker image",
|
||||||
)
|
)
|
||||||
# 'performance' creates a combined .tgz with server
|
|
||||||
# and configs to be used for performance test.
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--package-type",
|
"--package-type",
|
||||||
choices=["deb", "binary", "performance", "coverity"],
|
choices=["deb", "binary", "coverity"],
|
||||||
required=True,
|
required=True,
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--clickhouse-repo-path",
|
"--clickhouse-repo-path",
|
||||||
default=os.path.join(
|
default=os.path.join(os.path.dirname(SCRIPT_PATH), os.pardir, os.pardir),
|
||||||
os.path.dirname(os.path.abspath(__file__)), os.pardir, os.pardir
|
type=dir_name,
|
||||||
),
|
|
||||||
help="ClickHouse git repository",
|
help="ClickHouse git repository",
|
||||||
)
|
)
|
||||||
parser.add_argument("--output-dir", required=True)
|
parser.add_argument("--output-dir", type=dir_name, required=True)
|
||||||
parser.add_argument("--build-type", choices=("debug", ""), default="")
|
parser.add_argument("--build-type", choices=("debug", ""), default="")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--compiler",
|
"--compiler",
|
||||||
@ -315,6 +354,7 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--ccache_dir",
|
"--ccache_dir",
|
||||||
default=os.getenv("HOME", "") + "/.ccache",
|
default=os.getenv("HOME", "") + "/.ccache",
|
||||||
|
type=dir_name,
|
||||||
help="a directory with ccache",
|
help="a directory with ccache",
|
||||||
)
|
)
|
||||||
parser.add_argument("--distcc-hosts", nargs="+")
|
parser.add_argument("--distcc-hosts", nargs="+")
|
||||||
@ -330,39 +370,28 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--docker-image-version", default="latest", help="docker image tag to use"
|
"--docker-image-version", default="latest", help="docker image tag to use"
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--as-root", action="store_true", help="if the container should run as root"
|
||||||
|
)
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
if not os.path.isabs(args.output_dir):
|
|
||||||
args.output_dir = os.path.abspath(os.path.join(os.getcwd(), args.output_dir))
|
|
||||||
|
|
||||||
image_type = (
|
image_name = f"clickhouse/{IMAGE_TYPE}-builder"
|
||||||
"binary"
|
|
||||||
if args.package_type in ("performance", "coverity")
|
|
||||||
else args.package_type
|
|
||||||
)
|
|
||||||
image_name = "clickhouse/binary-builder"
|
|
||||||
|
|
||||||
if not os.path.isabs(args.clickhouse_repo_path):
|
ch_root = args.clickhouse_repo_path
|
||||||
ch_root = os.path.abspath(os.path.join(os.getcwd(), args.clickhouse_repo_path))
|
|
||||||
else:
|
|
||||||
ch_root = args.clickhouse_repo_path
|
|
||||||
|
|
||||||
if args.additional_pkgs and image_type != "deb":
|
if args.additional_pkgs and args.package_type != "deb":
|
||||||
raise Exception("Can build additional packages only in deb build")
|
raise Exception("Can build additional packages only in deb build")
|
||||||
|
|
||||||
if args.with_binaries != "" and image_type != "deb":
|
if args.with_binaries != "" and args.package_type != "deb":
|
||||||
raise Exception("Can add additional binaries only in deb build")
|
raise Exception("Can add additional binaries only in deb build")
|
||||||
|
|
||||||
if args.with_binaries != "" and image_type == "deb":
|
if args.with_binaries != "" and args.package_type == "deb":
|
||||||
logging.info("Should place %s to output", args.with_binaries)
|
logging.info("Should place %s to output", args.with_binaries)
|
||||||
|
|
||||||
dockerfile = os.path.join(ch_root, "docker/packager", image_type, "Dockerfile")
|
dockerfile = os.path.join(ch_root, "docker/packager", IMAGE_TYPE, "Dockerfile")
|
||||||
image_with_version = image_name + ":" + args.docker_image_version
|
image_with_version = image_name + ":" + args.docker_image_version
|
||||||
if (
|
if not check_image_exists_locally(image_name) or args.force_build_image:
|
||||||
image_type != "freebsd"
|
|
||||||
and not check_image_exists_locally(image_name)
|
|
||||||
or args.force_build_image
|
|
||||||
):
|
|
||||||
if not pull_image(image_with_version) or args.force_build_image:
|
if not pull_image(image_with_version) or args.force_build_image:
|
||||||
build_image(image_with_version, dockerfile)
|
build_image(image_with_version, dockerfile)
|
||||||
env_prepared = parse_env_variables(
|
env_prepared = parse_env_variables(
|
||||||
@ -370,7 +399,6 @@ if __name__ == "__main__":
|
|||||||
args.compiler,
|
args.compiler,
|
||||||
args.sanitizer,
|
args.sanitizer,
|
||||||
args.package_type,
|
args.package_type,
|
||||||
image_type,
|
|
||||||
args.cache,
|
args.cache,
|
||||||
args.distcc_hosts,
|
args.distcc_hosts,
|
||||||
args.split_binary,
|
args.split_binary,
|
||||||
@ -383,8 +411,10 @@ if __name__ == "__main__":
|
|||||||
args.with_binaries,
|
args.with_binaries,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
pre_build(args.clickhouse_repo_path, env_prepared)
|
||||||
run_docker_image_with_env(
|
run_docker_image_with_env(
|
||||||
image_name,
|
image_name,
|
||||||
|
args.as_root,
|
||||||
args.output_dir,
|
args.output_dir,
|
||||||
env_prepared,
|
env_prepared,
|
||||||
ch_root,
|
ch_root,
|
||||||
|
@ -177,6 +177,8 @@ function clone_submodules
|
|||||||
contrib/jemalloc
|
contrib/jemalloc
|
||||||
contrib/replxx
|
contrib/replxx
|
||||||
contrib/wyhash
|
contrib/wyhash
|
||||||
|
contrib/eigen
|
||||||
|
contrib/hashidsxx
|
||||||
)
|
)
|
||||||
|
|
||||||
git submodule sync
|
git submodule sync
|
||||||
|
@ -88,7 +88,8 @@ RUN python3 -m pip install \
|
|||||||
urllib3 \
|
urllib3 \
|
||||||
requests-kerberos \
|
requests-kerberos \
|
||||||
pyhdfs \
|
pyhdfs \
|
||||||
azure-storage-blob
|
azure-storage-blob \
|
||||||
|
meilisearch
|
||||||
|
|
||||||
COPY modprobe.sh /usr/local/bin/modprobe
|
COPY modprobe.sh /usr/local/bin/modprobe
|
||||||
COPY dockerd-entrypoint.sh /usr/local/bin/
|
COPY dockerd-entrypoint.sh /usr/local/bin/
|
||||||
|
@ -0,0 +1,16 @@
|
|||||||
|
version: '2.3'
|
||||||
|
services:
|
||||||
|
meili1:
|
||||||
|
image: getmeili/meilisearch:latest
|
||||||
|
restart: always
|
||||||
|
ports:
|
||||||
|
- ${MEILI_EXTERNAL_PORT}:${MEILI_INTERNAL_PORT}
|
||||||
|
|
||||||
|
meili_secure:
|
||||||
|
image: getmeili/meilisearch:latest
|
||||||
|
restart: always
|
||||||
|
ports:
|
||||||
|
- ${MEILI_SECURE_EXTERNAL_PORT}:${MEILI_SECURE_INTERNAL_PORT}
|
||||||
|
environment:
|
||||||
|
MEILI_MASTER_KEY: "password"
|
||||||
|
|
@ -3,6 +3,7 @@ set -ex
|
|||||||
set -o pipefail
|
set -o pipefail
|
||||||
trap "exit" INT TERM
|
trap "exit" INT TERM
|
||||||
trap 'kill $(jobs -pr) ||:' EXIT
|
trap 'kill $(jobs -pr) ||:' EXIT
|
||||||
|
BUILD_NAME=${BUILD_NAME:-package_release}
|
||||||
|
|
||||||
mkdir db0 ||:
|
mkdir db0 ||:
|
||||||
mkdir left ||:
|
mkdir left ||:
|
||||||
@ -26,7 +27,10 @@ function download
|
|||||||
{
|
{
|
||||||
# Historically there were various paths for the performance test package.
|
# Historically there were various paths for the performance test package.
|
||||||
# Test all of them.
|
# Test all of them.
|
||||||
declare -a urls_to_try=("https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/performance/performance.tgz")
|
declare -a urls_to_try=(
|
||||||
|
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/$BUILD_NAME/performance.tgz"
|
||||||
|
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/performance/performance.tgz"
|
||||||
|
)
|
||||||
|
|
||||||
for path in "${urls_to_try[@]}"
|
for path in "${urls_to_try[@]}"
|
||||||
do
|
do
|
||||||
@ -41,7 +45,7 @@ function download
|
|||||||
# download anything, for example in some manual runs. In this case, SHAs are not set.
|
# download anything, for example in some manual runs. In this case, SHAs are not set.
|
||||||
if ! [ "$left_sha" = "$right_sha" ]
|
if ! [ "$left_sha" = "$right_sha" ]
|
||||||
then
|
then
|
||||||
wget -nv -nd -c "$left_path" -O- | tar -C left --strip-components=1 -zxv &
|
wget -nv -nd -c "$left_path" -O- | tar -C left --no-same-owner --strip-components=1 -zxv &
|
||||||
elif [ "$right_sha" != "" ]
|
elif [ "$right_sha" != "" ]
|
||||||
then
|
then
|
||||||
mkdir left ||:
|
mkdir left ||:
|
||||||
|
@ -5,6 +5,7 @@ CHPC_CHECK_START_TIMESTAMP="$(date +%s)"
|
|||||||
export CHPC_CHECK_START_TIMESTAMP
|
export CHPC_CHECK_START_TIMESTAMP
|
||||||
|
|
||||||
S3_URL=${S3_URL:="https://clickhouse-builds.s3.amazonaws.com"}
|
S3_URL=${S3_URL:="https://clickhouse-builds.s3.amazonaws.com"}
|
||||||
|
BUILD_NAME=${BUILD_NAME:-package_release}
|
||||||
|
|
||||||
COMMON_BUILD_PREFIX="/clickhouse_build_check"
|
COMMON_BUILD_PREFIX="/clickhouse_build_check"
|
||||||
if [[ $S3_URL == *"s3.amazonaws.com"* ]]; then
|
if [[ $S3_URL == *"s3.amazonaws.com"* ]]; then
|
||||||
@ -64,7 +65,12 @@ function find_reference_sha
|
|||||||
# Historically there were various path for the performance test package,
|
# Historically there were various path for the performance test package,
|
||||||
# test all of them.
|
# test all of them.
|
||||||
unset found
|
unset found
|
||||||
declare -a urls_to_try=("https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/performance/performance.tgz")
|
declare -a urls_to_try=(
|
||||||
|
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/$BUILD_NAME/performance.tgz"
|
||||||
|
# FIXME: the following link is left there for backward compatibility.
|
||||||
|
# We should remove it after 2022-11-01
|
||||||
|
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/performance/performance.tgz"
|
||||||
|
)
|
||||||
for path in "${urls_to_try[@]}"
|
for path in "${urls_to_try[@]}"
|
||||||
do
|
do
|
||||||
if curl_with_retry "$path"
|
if curl_with_retry "$path"
|
||||||
@ -88,13 +94,13 @@ chmod 777 workspace output
|
|||||||
cd workspace
|
cd workspace
|
||||||
|
|
||||||
# Download the package for the version we are going to test.
|
# Download the package for the version we are going to test.
|
||||||
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/performance/performance.tgz"
|
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
|
||||||
then
|
then
|
||||||
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/performance/performance.tgz"
|
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
mkdir right
|
mkdir right
|
||||||
wget -nv -nd -c "$right_path" -O- | tar -C right --strip-components=1 -zxv
|
wget -nv -nd -c "$right_path" -O- | tar -C right --no-same-owner --strip-components=1 -zxv
|
||||||
|
|
||||||
# Find reference revision if not specified explicitly
|
# Find reference revision if not specified explicitly
|
||||||
if [ "$REF_SHA" == "" ]; then find_reference_sha; fi
|
if [ "$REF_SHA" == "" ]; then find_reference_sha; fi
|
||||||
@ -155,7 +161,7 @@ ulimit -c unlimited
|
|||||||
cat /proc/sys/kernel/core_pattern
|
cat /proc/sys/kernel/core_pattern
|
||||||
|
|
||||||
# Start the main comparison script.
|
# Start the main comparison script.
|
||||||
{ \
|
{
|
||||||
time ../download.sh "$REF_PR" "$REF_SHA" "$PR_TO_TEST" "$SHA_TO_TEST" && \
|
time ../download.sh "$REF_PR" "$REF_SHA" "$PR_TO_TEST" "$SHA_TO_TEST" && \
|
||||||
time stage=configure "$script_path"/compare.sh ; \
|
time stage=configure "$script_path"/compare.sh ; \
|
||||||
} 2>&1 | ts "$(printf '%%Y-%%m-%%d %%H:%%M:%%S\t')" | tee compare.log
|
} 2>&1 | ts "$(printf '%%Y-%%m-%%d %%H:%%M:%%S\t')" | tee compare.log
|
||||||
@ -178,4 +184,6 @@ ls -lath
|
|||||||
report analyze benchmark metrics \
|
report analyze benchmark metrics \
|
||||||
./*.core.dmp ./*.core
|
./*.core.dmp ./*.core
|
||||||
|
|
||||||
cp compare.log /output
|
# If the files aren't same, copy it
|
||||||
|
cmp --silent compare.log /output/compare.log || \
|
||||||
|
cp compare.log /output
|
||||||
|
@ -9,11 +9,6 @@ cmake .. \
|
|||||||
-DCMAKE_C_COMPILER=$(which clang-13) \
|
-DCMAKE_C_COMPILER=$(which clang-13) \
|
||||||
-DCMAKE_CXX_COMPILER=$(which clang++-13) \
|
-DCMAKE_CXX_COMPILER=$(which clang++-13) \
|
||||||
-DCMAKE_BUILD_TYPE=Debug \
|
-DCMAKE_BUILD_TYPE=Debug \
|
||||||
-DENABLE_CLICKHOUSE_ALL=OFF \
|
|
||||||
-DENABLE_CLICKHOUSE_SERVER=ON \
|
|
||||||
-DENABLE_CLICKHOUSE_CLIENT=ON \
|
|
||||||
-DENABLE_LIBRARIES=OFF \
|
|
||||||
-DUSE_UNWIND=ON \
|
|
||||||
-DENABLE_UTILS=OFF \
|
-DENABLE_UTILS=OFF \
|
||||||
-DENABLE_TESTS=OFF
|
-DENABLE_TESTS=OFF
|
||||||
```
|
```
|
||||||
|
@ -13,7 +13,7 @@ then
|
|||||||
elif [ "${ARCH}" = "aarch64" ]
|
elif [ "${ARCH}" = "aarch64" ]
|
||||||
then
|
then
|
||||||
DIR="aarch64"
|
DIR="aarch64"
|
||||||
elif [ "${ARCH}" = "powerpc64le" ]
|
elif [ "${ARCH}" = "powerpc64le" ] || [ "${ARCH}" = "ppc64le" ]
|
||||||
then
|
then
|
||||||
DIR="powerpc64le"
|
DIR="powerpc64le"
|
||||||
fi
|
fi
|
||||||
@ -25,7 +25,7 @@ then
|
|||||||
elif [ "${ARCH}" = "aarch64" ]
|
elif [ "${ARCH}" = "aarch64" ]
|
||||||
then
|
then
|
||||||
DIR="freebsd-aarch64"
|
DIR="freebsd-aarch64"
|
||||||
elif [ "${ARCH}" = "powerpc64le" ]
|
elif [ "${ARCH}" = "powerpc64le" ] || [ "${ARCH}" = "ppc64le" ]
|
||||||
then
|
then
|
||||||
DIR="freebsd-powerpc64le"
|
DIR="freebsd-powerpc64le"
|
||||||
fi
|
fi
|
||||||
|
@ -178,6 +178,19 @@
|
|||||||
* Fix segfault in Avro that appears after the second insert into file. [#33566](https://github.com/ClickHouse/ClickHouse/pull/33566) ([Kruglov Pavel](https://github.com/Avogar)).
|
* Fix segfault in Avro that appears after the second insert into file. [#33566](https://github.com/ClickHouse/ClickHouse/pull/33566) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
* Fix wrong database for JOIN w/o explicit database in distributed queries (Fixes: [#10471](https://github.com/ClickHouse/ClickHouse/issues/10471)). [#33611](https://github.com/ClickHouse/ClickHouse/pull/33611) ([Azat Khuzhin](https://github.com/azat)).
|
* Fix wrong database for JOIN w/o explicit database in distributed queries (Fixes: [#10471](https://github.com/ClickHouse/ClickHouse/issues/10471)). [#33611](https://github.com/ClickHouse/ClickHouse/pull/33611) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehaviour in official stable or prestable release):
|
||||||
|
|
||||||
|
* Fix possible crash (or incorrect result) in case of `LowCardinality` arguments of window function. Fixes [#31114](https://github.com/ClickHouse/ClickHouse/issues/31114). [#31888](https://github.com/ClickHouse/ClickHouse/pull/31888) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
|
||||||
|
#### Bug Fix (v21.9.4.35-stable)
|
||||||
|
|
||||||
|
* Fix [#32964](https://github.com/ClickHouse/ClickHouse/issues/32964). [#32965](https://github.com/ClickHouse/ClickHouse/pull/32965) ([save-my-heart](https://github.com/save-my-heart)).
|
||||||
|
|
||||||
|
#### NO CL CATEGORY
|
||||||
|
|
||||||
|
* Fix Regular Expression while key path search. [#33023](https://github.com/ClickHouse/ClickHouse/pull/33023) ([mreddy017](https://github.com/mreddy017)).
|
||||||
|
* - Allow to split GraphiteMergeTree rollup rules for plain/tagged metrics (optional rule_type field). [#33494](https://github.com/ClickHouse/ClickHouse/pull/33494) ([Michail Safronov](https://github.com/msaf1980)).
|
||||||
|
|
||||||
#### NO CL ENTRY
|
#### NO CL ENTRY
|
||||||
|
|
||||||
* NO CL ENTRY: 'Update CHANGELOG.md'. [#32472](https://github.com/ClickHouse/ClickHouse/pull/32472) ([Rich Raposa](https://github.com/rfraposa)).
|
* NO CL ENTRY: 'Update CHANGELOG.md'. [#32472](https://github.com/ClickHouse/ClickHouse/pull/32472) ([Rich Raposa](https://github.com/rfraposa)).
|
||||||
@ -198,19 +211,6 @@
|
|||||||
* NO CL ENTRY: 'Added Superwall to adopters list'. [#33573](https://github.com/ClickHouse/ClickHouse/pull/33573) ([Justin Hilliard](https://github.com/jahilliard)).
|
* NO CL ENTRY: 'Added Superwall to adopters list'. [#33573](https://github.com/ClickHouse/ClickHouse/pull/33573) ([Justin Hilliard](https://github.com/jahilliard)).
|
||||||
* NO CL ENTRY: 'Revert "Ignore parse failure of opentelemetry header"'. [#33594](https://github.com/ClickHouse/ClickHouse/pull/33594) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
* NO CL ENTRY: 'Revert "Ignore parse failure of opentelemetry header"'. [#33594](https://github.com/ClickHouse/ClickHouse/pull/33594) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
|
||||||
#### Bug Fix (user-visible misbehaviour in official stable or prestable release):
|
|
||||||
|
|
||||||
* Fix possible crash (or incorrect result) in case of `LowCardinality` arguments of window function. Fixes [#31114](https://github.com/ClickHouse/ClickHouse/issues/31114). [#31888](https://github.com/ClickHouse/ClickHouse/pull/31888) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
|
||||||
|
|
||||||
#### NO CL CATEGORY
|
|
||||||
|
|
||||||
* Fix Regular Expression while key path search. [#33023](https://github.com/ClickHouse/ClickHouse/pull/33023) ([mreddy017](https://github.com/mreddy017)).
|
|
||||||
* - Allow to split GraphiteMergeTree rollup rules for plain/tagged metrics (optional rule_type field). [#33494](https://github.com/ClickHouse/ClickHouse/pull/33494) ([Michail Safronov](https://github.com/msaf1980)).
|
|
||||||
|
|
||||||
#### Bug Fix (v21.9.4.35-stable)
|
|
||||||
|
|
||||||
* Fix [#32964](https://github.com/ClickHouse/ClickHouse/issues/32964). [#32965](https://github.com/ClickHouse/ClickHouse/pull/32965) ([save-my-heart](https://github.com/save-my-heart)).
|
|
||||||
|
|
||||||
#### New Feature / New Tool
|
#### New Feature / New Tool
|
||||||
|
|
||||||
* Tool for collecting diagnostics data. [#33175](https://github.com/ClickHouse/ClickHouse/pull/33175) ([Alexander Burmak](https://github.com/Alex-Burmak)).
|
* Tool for collecting diagnostics data. [#33175](https://github.com/ClickHouse/ClickHouse/pull/33175) ([Alexander Burmak](https://github.com/Alex-Burmak)).
|
||||||
|
@ -38,6 +38,7 @@
|
|||||||
|
|
||||||
#### Improvement
|
#### Improvement
|
||||||
* Now ReplicatedMergeTree can recover data when some of its disks are broken. [#13544](https://github.com/ClickHouse/ClickHouse/pull/13544) ([Amos Bird](https://github.com/amosbird)).
|
* Now ReplicatedMergeTree can recover data when some of its disks are broken. [#13544](https://github.com/ClickHouse/ClickHouse/pull/13544) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Dynamic reload of server TLS certificates on config reload. Closes [#15764](https://github.com/ClickHouse/ClickHouse/issues/15764). [#15765](https://github.com/ClickHouse/ClickHouse/pull/15765) ([johnskopis](https://github.com/johnskopis)).
|
||||||
* Merge [#15765](https://github.com/ClickHouse/ClickHouse/issues/15765) (Dynamic reload of server TLS certificates on config reload) cc @johnskopis. [#31257](https://github.com/ClickHouse/ClickHouse/pull/31257) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
|
* Merge [#15765](https://github.com/ClickHouse/ClickHouse/issues/15765) (Dynamic reload of server TLS certificates on config reload) cc @johnskopis. [#31257](https://github.com/ClickHouse/ClickHouse/pull/31257) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
|
||||||
* Added `UUID` data type support for functions `hex`, `bin`. [#32170](https://github.com/ClickHouse/ClickHouse/pull/32170) ([Frank Chen](https://github.com/FrankChen021)).
|
* Added `UUID` data type support for functions `hex`, `bin`. [#32170](https://github.com/ClickHouse/ClickHouse/pull/32170) ([Frank Chen](https://github.com/FrankChen021)).
|
||||||
* Support `optimize_read_in_order` if prefix of sorting key is already sorted. E.g. if we have sorting key `ORDER BY (a, b)` in table and query with `WHERE a = const ORDER BY b` clauses, now it will be applied reading in order of sorting key instead of full sort. [#32748](https://github.com/ClickHouse/ClickHouse/pull/32748) ([Anton Popov](https://github.com/CurtizJ)).
|
* Support `optimize_read_in_order` if prefix of sorting key is already sorted. E.g. if we have sorting key `ORDER BY (a, b)` in table and query with `WHERE a = const ORDER BY b` clauses, now it will be applied reading in order of sorting key instead of full sort. [#32748](https://github.com/ClickHouse/ClickHouse/pull/32748) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
@ -194,15 +195,16 @@
|
|||||||
* Fixed the assertion in case of using `allow_experimental_parallel_reading_from_replicas` with `max_parallel_replicas` equals to 1. This fixes [#34525](https://github.com/ClickHouse/ClickHouse/issues/34525). [#34613](https://github.com/ClickHouse/ClickHouse/pull/34613) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
* Fixed the assertion in case of using `allow_experimental_parallel_reading_from_replicas` with `max_parallel_replicas` equals to 1. This fixes [#34525](https://github.com/ClickHouse/ClickHouse/issues/34525). [#34613](https://github.com/ClickHouse/ClickHouse/pull/34613) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
* - Add Debug workflow to get variables for all actions on demand - Fix lack of pr_info.number for some edge case. [#34644](https://github.com/ClickHouse/ClickHouse/pull/34644) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
* - Add Debug workflow to get variables for all actions on demand - Fix lack of pr_info.number for some edge case. [#34644](https://github.com/ClickHouse/ClickHouse/pull/34644) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### NO CL CATEGORY
|
||||||
|
|
||||||
|
* Reverting to previous docker images, will take a closer look at failing tests from [#34373](https://github.com/ClickHouse/ClickHouse/issues/34373). [#34413](https://github.com/ClickHouse/ClickHouse/pull/34413) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
#### NO CL ENTRY
|
#### NO CL ENTRY
|
||||||
|
|
||||||
|
* NO CL ENTRY: 'Switch gosu to su-exec'. [#33563](https://github.com/ClickHouse/ClickHouse/pull/33563) ([Anselmo D. Adams](https://github.com/anselmodadams)).
|
||||||
* NO CL ENTRY: 'Revert "Additionally check remote_fs_execute_merges_on_single_replica_time_threshold inside ReplicatedMergeTreeQueue"'. [#34201](https://github.com/ClickHouse/ClickHouse/pull/34201) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
* NO CL ENTRY: 'Revert "Additionally check remote_fs_execute_merges_on_single_replica_time_threshold inside ReplicatedMergeTreeQueue"'. [#34201](https://github.com/ClickHouse/ClickHouse/pull/34201) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
* NO CL ENTRY: 'Revert "Add func tests run with s3"'. [#34211](https://github.com/ClickHouse/ClickHouse/pull/34211) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
* NO CL ENTRY: 'Revert "Add func tests run with s3"'. [#34211](https://github.com/ClickHouse/ClickHouse/pull/34211) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
* NO CL ENTRY: 'Revert "Add pool to WriteBufferFromS3"'. [#34212](https://github.com/ClickHouse/ClickHouse/pull/34212) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
* NO CL ENTRY: 'Revert "Add pool to WriteBufferFromS3"'. [#34212](https://github.com/ClickHouse/ClickHouse/pull/34212) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
* NO CL ENTRY: 'Add support agreement page and snippets.'. [#34512](https://github.com/ClickHouse/ClickHouse/pull/34512) ([Tom Risse](https://github.com/flickerbox-tom)).
|
* NO CL ENTRY: 'Add support agreement page and snippets.'. [#34512](https://github.com/ClickHouse/ClickHouse/pull/34512) ([Tom Risse](https://github.com/flickerbox-tom)).
|
||||||
* NO CL ENTRY: 'Add Gigasheet to adopters'. [#34589](https://github.com/ClickHouse/ClickHouse/pull/34589) ([Brian Hunter](https://github.com/bjhunter)).
|
* NO CL ENTRY: 'Add Gigasheet to adopters'. [#34589](https://github.com/ClickHouse/ClickHouse/pull/34589) ([Brian Hunter](https://github.com/bjhunter)).
|
||||||
|
|
||||||
#### NO CL CATEGORY
|
|
||||||
|
|
||||||
* Reverting to previous docker images, will take a closer look at failing tests from [#34373](https://github.com/ClickHouse/ClickHouse/issues/34373). [#34413](https://github.com/ClickHouse/ClickHouse/pull/34413) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
|
||||||
|
|
||||||
|
@ -1,6 +1,2 @@
|
|||||||
### ClickHouse release v22.3.2.2-lts FIXME as compared to v22.3.1.1262-prestable
|
### ClickHouse release v22.3.2.2-lts FIXME as compared to v22.3.1.1262-prestable
|
||||||
|
|
||||||
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
|
||||||
|
|
||||||
* Fix bug in S3 zero-copy replication which can lead to errors like `Found parts with the same min block and with the same max block as the missing part` after concurrent fetch/drop table. [#35348](https://github.com/ClickHouse/ClickHouse/pull/35348) ([alesapin](https://github.com/alesapin)).
|
|
||||||
|
|
||||||
|
@ -8,7 +8,6 @@
|
|||||||
* Backported in [#36244](https://github.com/ClickHouse/ClickHouse/issues/36244): Fix usage of quota with asynchronous inserts. [#35645](https://github.com/ClickHouse/ClickHouse/pull/35645) ([Anton Popov](https://github.com/CurtizJ)).
|
* Backported in [#36244](https://github.com/ClickHouse/ClickHouse/issues/36244): Fix usage of quota with asynchronous inserts. [#35645](https://github.com/ClickHouse/ClickHouse/pull/35645) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
* Backported in [#36240](https://github.com/ClickHouse/ClickHouse/issues/36240): Fix possible loss of subcolumns in type `Object`. [#35682](https://github.com/ClickHouse/ClickHouse/pull/35682) ([Anton Popov](https://github.com/CurtizJ)).
|
* Backported in [#36240](https://github.com/ClickHouse/ClickHouse/issues/36240): Fix possible loss of subcolumns in type `Object`. [#35682](https://github.com/ClickHouse/ClickHouse/pull/35682) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
* Backported in [#36242](https://github.com/ClickHouse/ClickHouse/issues/36242): Fix possible `Can't adjust last granule` exception while reading subcolumns of type `Object`. [#35687](https://github.com/ClickHouse/ClickHouse/pull/35687) ([Anton Popov](https://github.com/CurtizJ)).
|
* Backported in [#36242](https://github.com/ClickHouse/ClickHouse/issues/36242): Fix possible `Can't adjust last granule` exception while reading subcolumns of type `Object`. [#35687](https://github.com/ClickHouse/ClickHouse/pull/35687) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
* Backported in [#35938](https://github.com/ClickHouse/ClickHouse/issues/35938): Avoid processing per-column TTL multiple times. [#35820](https://github.com/ClickHouse/ClickHouse/pull/35820) ([Azat Khuzhin](https://github.com/azat)).
|
|
||||||
* Backported in [#36147](https://github.com/ClickHouse/ClickHouse/issues/36147): Fix reading from `Kafka` tables when `kafka_num_consumers > 1` and `kafka_thread_per_consumer = 0`. Returns parallel & multithreaded reading, accidentally broken in 21.11. Closes [#35153](https://github.com/ClickHouse/ClickHouse/issues/35153). [#35973](https://github.com/ClickHouse/ClickHouse/pull/35973) ([filimonov](https://github.com/filimonov)).
|
* Backported in [#36147](https://github.com/ClickHouse/ClickHouse/issues/36147): Fix reading from `Kafka` tables when `kafka_num_consumers > 1` and `kafka_thread_per_consumer = 0`. Returns parallel & multithreaded reading, accidentally broken in 21.11. Closes [#35153](https://github.com/ClickHouse/ClickHouse/issues/35153). [#35973](https://github.com/ClickHouse/ClickHouse/pull/35973) ([filimonov](https://github.com/filimonov)).
|
||||||
* Backported in [#36276](https://github.com/ClickHouse/ClickHouse/issues/36276): Fix reading of empty arrays in reverse order (in queries with descending sorting by prefix of primary key). [#36215](https://github.com/ClickHouse/ClickHouse/pull/36215) ([Anton Popov](https://github.com/CurtizJ)).
|
* Backported in [#36276](https://github.com/ClickHouse/ClickHouse/issues/36276): Fix reading of empty arrays in reverse order (in queries with descending sorting by prefix of primary key). [#36215](https://github.com/ClickHouse/ClickHouse/pull/36215) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
|
||||||
|
@ -2,6 +2,5 @@
|
|||||||
|
|
||||||
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
||||||
|
|
||||||
* Backported in [#36525](https://github.com/ClickHouse/ClickHouse/issues/36525): Queries with aliases inside special operators returned parsing error (was broken in 22.1). Example: `SELECT substring('test' AS t, 1, 1)`. [#36167](https://github.com/ClickHouse/ClickHouse/pull/36167) ([Maksim Kita](https://github.com/kitaisreal)).
|
|
||||||
* Backported in [#36795](https://github.com/ClickHouse/ClickHouse/issues/36795): Fix vertical merges in wide parts. Previously an exception `There is no column` can be thrown during merge. [#36707](https://github.com/ClickHouse/ClickHouse/pull/36707) ([Anton Popov](https://github.com/CurtizJ)).
|
* Backported in [#36795](https://github.com/ClickHouse/ClickHouse/issues/36795): Fix vertical merges in wide parts. Previously an exception `There is no column` can be thrown during merge. [#36707](https://github.com/ClickHouse/ClickHouse/pull/36707) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
|
||||||
|
@ -150,6 +150,10 @@
|
|||||||
* Check a number of required reports in BuilderSpecialReport. [#36413](https://github.com/ClickHouse/ClickHouse/pull/36413) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
* Check a number of required reports in BuilderSpecialReport. [#36413](https://github.com/ClickHouse/ClickHouse/pull/36413) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
* Add a labeling for `Revert` PRs. [#36422](https://github.com/ClickHouse/ClickHouse/pull/36422) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
* Add a labeling for `Revert` PRs. [#36422](https://github.com/ClickHouse/ClickHouse/pull/36422) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (prestable release)
|
||||||
|
|
||||||
|
* call RemoteQueryExecutor with original_query instead of an rewritten query, elimate the AMBIGUOUS_COLUMN_NAME exception. [#35748](https://github.com/ClickHouse/ClickHouse/pull/35748) ([lgbo](https://github.com/lgbo-ustc)).
|
||||||
|
|
||||||
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
||||||
|
|
||||||
* Disallow ALTER TTL for engines that does not support it, to avoid breaking ATTACH TABLE (closes [#33344](https://github.com/ClickHouse/ClickHouse/issues/33344)). [#33391](https://github.com/ClickHouse/ClickHouse/pull/33391) ([zhongyuankai](https://github.com/zhongyuankai)).
|
* Disallow ALTER TTL for engines that does not support it, to avoid breaking ATTACH TABLE (closes [#33344](https://github.com/ClickHouse/ClickHouse/issues/33344)). [#33391](https://github.com/ClickHouse/ClickHouse/pull/33391) ([zhongyuankai](https://github.com/zhongyuankai)).
|
||||||
@ -158,7 +162,6 @@
|
|||||||
* Fix mutations in tables with enabled sparse columns. [#35284](https://github.com/ClickHouse/ClickHouse/pull/35284) ([Anton Popov](https://github.com/CurtizJ)).
|
* Fix mutations in tables with enabled sparse columns. [#35284](https://github.com/ClickHouse/ClickHouse/pull/35284) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
* Fix schema inference for TSKV format while using small max_read_buffer_size. [#35332](https://github.com/ClickHouse/ClickHouse/pull/35332) ([Kruglov Pavel](https://github.com/Avogar)).
|
* Fix schema inference for TSKV format while using small max_read_buffer_size. [#35332](https://github.com/ClickHouse/ClickHouse/pull/35332) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
* Fix partition pruning in case of comparison with constant in `WHERE`. If column and constant had different types, overflow was possible. Query could return an incorrect empty result. This fixes [#35304](https://github.com/ClickHouse/ClickHouse/issues/35304). [#35334](https://github.com/ClickHouse/ClickHouse/pull/35334) ([Amos Bird](https://github.com/amosbird)).
|
* Fix partition pruning in case of comparison with constant in `WHERE`. If column and constant had different types, overflow was possible. Query could return an incorrect empty result. This fixes [#35304](https://github.com/ClickHouse/ClickHouse/issues/35304). [#35334](https://github.com/ClickHouse/ClickHouse/pull/35334) ([Amos Bird](https://github.com/amosbird)).
|
||||||
* Fix bug in S3 zero-copy replication which can lead to errors like `Found parts with the same min block and with the same max block as the missing part` after concurrent fetch/drop table. [#35348](https://github.com/ClickHouse/ClickHouse/pull/35348) ([alesapin](https://github.com/alesapin)).
|
|
||||||
* Fix issue with non-existing directory https://github.com/ClickHouse/ClickHouse/runs/5588046879?check_suite_focus=true. [#35376](https://github.com/ClickHouse/ClickHouse/pull/35376) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
* Fix issue with non-existing directory https://github.com/ClickHouse/ClickHouse/runs/5588046879?check_suite_focus=true. [#35376](https://github.com/ClickHouse/ClickHouse/pull/35376) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
* Fix possible deadlock in cache. [#35378](https://github.com/ClickHouse/ClickHouse/pull/35378) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
* Fix possible deadlock in cache. [#35378](https://github.com/ClickHouse/ClickHouse/pull/35378) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
* Fix wrong assets path in release workflow. [#35379](https://github.com/ClickHouse/ClickHouse/pull/35379) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
* Fix wrong assets path in release workflow. [#35379](https://github.com/ClickHouse/ClickHouse/pull/35379) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
@ -233,7 +236,3 @@
|
|||||||
* NO CL ENTRY: 'Revert "clang-tidy report issues with Medium priority"'. [#35941](https://github.com/ClickHouse/ClickHouse/pull/35941) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
* NO CL ENTRY: 'Revert "clang-tidy report issues with Medium priority"'. [#35941](https://github.com/ClickHouse/ClickHouse/pull/35941) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
* NO CL ENTRY: 'Revert "Fix crash in ParallelReadBuffer"'. [#36210](https://github.com/ClickHouse/ClickHouse/pull/36210) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
* NO CL ENTRY: 'Revert "Fix crash in ParallelReadBuffer"'. [#36210](https://github.com/ClickHouse/ClickHouse/pull/36210) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
|
||||||
#### Bug Fix (prestable release)
|
|
||||||
|
|
||||||
* call RemoteQueryExecutor with original_query instead of an rewritten query, elimate the AMBIGUOUS_COLUMN_NAME exception. [#35748](https://github.com/ClickHouse/ClickHouse/pull/35748) ([lgbo](https://github.com/lgbo-ustc)).
|
|
||||||
|
|
||||||
|
@ -1,6 +1,2 @@
|
|||||||
### ClickHouse release v22.4.2.1-stable FIXME as compared to v22.4.1.2305-prestable
|
### ClickHouse release v22.4.2.1-stable FIXME as compared to v22.4.1.2305-prestable
|
||||||
|
|
||||||
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
|
||||||
|
|
||||||
* Fix projection analysis which might lead to wrong query result when IN subquery is used. This fixes [#35336](https://github.com/ClickHouse/ClickHouse/issues/35336). [#35631](https://github.com/ClickHouse/ClickHouse/pull/35631) ([Amos Bird](https://github.com/amosbird)).
|
|
||||||
|
|
||||||
|
@ -3,6 +3,5 @@
|
|||||||
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
||||||
|
|
||||||
* Backported in [#36524](https://github.com/ClickHouse/ClickHouse/issues/36524): Queries with aliases inside special operators returned parsing error (was broken in 22.1). Example: `SELECT substring('test' AS t, 1, 1)`. [#36167](https://github.com/ClickHouse/ClickHouse/pull/36167) ([Maksim Kita](https://github.com/kitaisreal)).
|
* Backported in [#36524](https://github.com/ClickHouse/ClickHouse/issues/36524): Queries with aliases inside special operators returned parsing error (was broken in 22.1). Example: `SELECT substring('test' AS t, 1, 1)`. [#36167](https://github.com/ClickHouse/ClickHouse/pull/36167) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
* Backported in [#36582](https://github.com/ClickHouse/ClickHouse/issues/36582): Fix nullptr dereference in JOIN and COLUMNS matcher. This fixes [#36416](https://github.com/ClickHouse/ClickHouse/issues/36416) . This is for https://github.com/ClickHouse/ClickHouse/pull/36417. [#36430](https://github.com/ClickHouse/ClickHouse/pull/36430) ([Amos Bird](https://github.com/amosbird)).
|
|
||||||
* Backported in [#36673](https://github.com/ClickHouse/ClickHouse/issues/36673): Fix merges of wide parts with type `Object`. [#36637](https://github.com/ClickHouse/ClickHouse/pull/36637) ([Anton Popov](https://github.com/CurtizJ)).
|
* Backported in [#36673](https://github.com/ClickHouse/ClickHouse/issues/36673): Fix merges of wide parts with type `Object`. [#36637](https://github.com/ClickHouse/ClickHouse/pull/36637) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
|
||||||
|
@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
||||||
|
|
||||||
* Backported in [#36524](https://github.com/ClickHouse/ClickHouse/issues/36524): Queries with aliases inside special operators returned parsing error (was broken in 22.1). Example: `SELECT substring('test' AS t, 1, 1)`. [#36167](https://github.com/ClickHouse/ClickHouse/pull/36167) ([Maksim Kita](https://github.com/kitaisreal)).
|
|
||||||
* Backported in [#36635](https://github.com/ClickHouse/ClickHouse/issues/36635): Fix `Missing column` exception which could happen while using `INTERPOLATE` with `ENGINE = MergeTree` table. [#36549](https://github.com/ClickHouse/ClickHouse/pull/36549) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
* Backported in [#36635](https://github.com/ClickHouse/ClickHouse/issues/36635): Fix `Missing column` exception which could happen while using `INTERPOLATE` with `ENGINE = MergeTree` table. [#36549](https://github.com/ClickHouse/ClickHouse/pull/36549) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
* Backported in [#36794](https://github.com/ClickHouse/ClickHouse/issues/36794): Fix vertical merges in wide parts. Previously an exception `There is no column` can be thrown during merge. [#36707](https://github.com/ClickHouse/ClickHouse/pull/36707) ([Anton Popov](https://github.com/CurtizJ)).
|
* Backported in [#36794](https://github.com/ClickHouse/ClickHouse/issues/36794): Fix vertical merges in wide parts. Previously an exception `There is no column` can be thrown during merge. [#36707](https://github.com/ClickHouse/ClickHouse/pull/36707) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
* Backported in [#36926](https://github.com/ClickHouse/ClickHouse/issues/36926): Fix bug in clickhouse-keeper which can lead to corrupted compressed log files in case of small load and restarts. [#36910](https://github.com/ClickHouse/ClickHouse/pull/36910) ([alesapin](https://github.com/alesapin)).
|
* Backported in [#36926](https://github.com/ClickHouse/ClickHouse/issues/36926): Fix bug in clickhouse-keeper which can lead to corrupted compressed log files in case of small load and restarts. [#36910](https://github.com/ClickHouse/ClickHouse/pull/36910) ([alesapin](https://github.com/alesapin)).
|
||||||
|
182
docs/changelogs/v22.5.1.2079-stable.md
Normal file
182
docs/changelogs/v22.5.1.2079-stable.md
Normal file
@ -0,0 +1,182 @@
|
|||||||
|
### ClickHouse release v22.5.1.2079-stable FIXME as compared to v22.4.1.2305-prestable
|
||||||
|
|
||||||
|
#### Backward Incompatible Change
|
||||||
|
* Updated the BoringSSL module to the official FIPS compliant version. This makes ClickHouse FIPS compliant. [#35914](https://github.com/ClickHouse/ClickHouse/pull/35914) ([Meena-Renganathan](https://github.com/Meena-Renganathan)).
|
||||||
|
* Now, background merges, mutations and `OPTIMIZE` will not increment `SelectedRows` and `SelectedBytes` metrics. They (still) will increment `MergedRows` and `MergedUncompressedBytes` as it was before. [#37040](https://github.com/ClickHouse/ClickHouse/pull/37040) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
|
||||||
|
#### New Feature
|
||||||
|
* add implementation of MeiliSearch storage and table function. [#33332](https://github.com/ClickHouse/ClickHouse/pull/33332) ([Mikhail Artemenko](https://github.com/Michicosun)).
|
||||||
|
* Add support of GROUPING SETS in GROUP BY clause. Follow up after [#33186](https://github.com/ClickHouse/ClickHouse/issues/33186). This implementation supports a parallel processing of grouping sets. [#33631](https://github.com/ClickHouse/ClickHouse/pull/33631) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* According to the design mentioned at :[#19627](https://github.com/ClickHouse/ClickHouse/issues/19627)#issuecomment-1068772646. [#35318](https://github.com/ClickHouse/ClickHouse/pull/35318) ([徐炘](https://github.com/weeds085490)).
|
||||||
|
* Added `SYSTEM SYNC DATABASE REPLICA` query which allows to sync tables metadata inside Replicated database, because currently synchronisation is asynchronous. [#35944](https://github.com/ClickHouse/ClickHouse/pull/35944) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* - Add output format Prometheus, [#36051](https://github.com/ClickHouse/ClickHouse/issues/36051). [#36206](https://github.com/ClickHouse/ClickHouse/pull/36206) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Parse collations in CREATE TABLE, throw exception or ignore. closes [#35892](https://github.com/ClickHouse/ClickHouse/issues/35892). [#36271](https://github.com/ClickHouse/ClickHouse/pull/36271) ([yuuch](https://github.com/yuuch)).
|
||||||
|
* Add aliases JSONLines and NDJSON for JSONEachRow. Closes [#36303](https://github.com/ClickHouse/ClickHouse/issues/36303). [#36327](https://github.com/ClickHouse/ClickHouse/pull/36327) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Set parts_to_delay_insert and parts_to_throw_insert as query-level settings. If they are defined, they can override table-level settings. [#36371](https://github.com/ClickHouse/ClickHouse/pull/36371) ([Memo](https://github.com/Joeywzr)).
|
||||||
|
* temporary table can show total rows and total bytes. [#36401](https://github.com/ClickHouse/ClickHouse/issues/36401). [#36439](https://github.com/ClickHouse/ClickHouse/pull/36439) ([xiedeyantu](https://github.com/xiedeyantu)).
|
||||||
|
* Added new hash function - wyHash64. [#36467](https://github.com/ClickHouse/ClickHouse/pull/36467) ([olevino](https://github.com/olevino)).
|
||||||
|
* Window function nth_value was added. [#36601](https://github.com/ClickHouse/ClickHouse/pull/36601) ([Nikolay](https://github.com/ndchikin)).
|
||||||
|
* Add MySQLDump input format. It reads all data from INSERT queries belonging to one table in dump. If there are more than one table, by default it reads data from the first one. [#36667](https://github.com/ClickHouse/ClickHouse/pull/36667) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* New single binary based diagnostics tool. [#36705](https://github.com/ClickHouse/ClickHouse/pull/36705) ([Dale McDiarmid](https://github.com/gingerwizard)).
|
||||||
|
* **Description:** It is used to count the system table of a request for remote file access, which can help users analyze the causes of performance fluctuations in the scenario of separation of storage and computer. The current system table structure is as follows. When a query reads a segment of a remote file, a record is generated. Read type include **READ_FROM_FS_AND_DOWNLOADED_TO_CACHE、READ_FROM_CACHE、READ_FROM_FS_BYPASSING_CACHE**, which used to indicate whether the query accesses the segment from the cache or from a remote file. [#36802](https://github.com/ClickHouse/ClickHouse/pull/36802) ([Han Shukai](https://github.com/KinderRiven)).
|
||||||
|
* Adds `h3Line`, `h3Distance` and `h3HexRing` functions. [#37030](https://github.com/ClickHouse/ClickHouse/pull/37030) ([Bharat Nallan](https://github.com/bharatnc)).
|
||||||
|
* Related issue - [#35101](https://github.com/ClickHouse/ClickHouse/issues/35101). [#37033](https://github.com/ClickHouse/ClickHouse/pull/37033) ([qieqieplus](https://github.com/qieqieplus)).
|
||||||
|
* Added system.certificates table. [#37142](https://github.com/ClickHouse/ClickHouse/pull/37142) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
|
|
||||||
|
#### Performance Improvement
|
||||||
|
* Improve performance of ORDER BY, MergeJoin, insertion into MergeTree using JIT compilation of sort columns comparator. [#34469](https://github.com/ClickHouse/ClickHouse/pull/34469) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* First commit is to increase the inline threshold. Next commits will improve queries by inlining for those who have shown better performance. This way we will not increase the compile time and binary size and optimize the program. [#34544](https://github.com/ClickHouse/ClickHouse/pull/34544) ([Daniel Kutenin](https://github.com/danlark1)).
|
||||||
|
* Transform OR LIKE chain to multiMatchAny. Will enable once we have more confidence it works. [#34932](https://github.com/ClickHouse/ClickHouse/pull/34932) ([Daniel Kutenin](https://github.com/danlark1)).
|
||||||
|
* Rewrite 'select countDistinct(a) from t' to 'select count(1) from (select a from t group by a)'. [#35993](https://github.com/ClickHouse/ClickHouse/pull/35993) ([zhanglistar](https://github.com/zhanglistar)).
|
||||||
|
* Change structure of `system.asynchronous_metric_log`. It will take about 10 times less space. This closes [#36357](https://github.com/ClickHouse/ClickHouse/issues/36357). The field `event_time_microseconds` was removed, because it is useless. [#36360](https://github.com/ClickHouse/ClickHouse/pull/36360) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* The default `HashJoin` is not thread safe for inserting right table's rows and run it in a single thread. When the right table is large, the join process is too slow with low cpu utilization. [#36415](https://github.com/ClickHouse/ClickHouse/pull/36415) ([lgbo](https://github.com/lgbo-ustc)).
|
||||||
|
* Improve performance of reading from storage `File` and table functions `file` in case when path has globs and matched directory contains large number of files. [#36647](https://github.com/ClickHouse/ClickHouse/pull/36647) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Apply parallel parsing for input format `HiveText`, which can speed up HiveText parsing by 2x when reading local file. [#36650](https://github.com/ClickHouse/ClickHouse/pull/36650) ([李扬](https://github.com/taiyang-li)).
|
||||||
|
* Improves performance of file descriptor cache by narrowing mutex scopes. [#36682](https://github.com/ClickHouse/ClickHouse/pull/36682) ([Anton Kozlov](https://github.com/tonickkozlov)).
|
||||||
|
* This PR improves the `WATCH` query in WindowView: 1. Reduce the latency of providing query results by calling the `fire_condition` signal. 2. Makes the cancel query operation(ctrl-c) faster, by checking `isCancelled()` more frequently. [#37226](https://github.com/ClickHouse/ClickHouse/pull/37226) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Improve performance of `avg`, `sum` aggregate functions if used without GROUP BY expression. [#37257](https://github.com/ClickHouse/ClickHouse/pull/37257) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Improve performance of unary arithmetic functions (`bitCount`, `bitNot`, `abs`, `intExp2`, `intExp10`, `negate`, `roundAge`, `roundDuration`, `roundToExp2`, `sign`) using dynamic dispatch. [#37289](https://github.com/ClickHouse/ClickHouse/pull/37289) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Remind properly if clickhouse-client --file is used without a preceding --external. Close [#34747](https://github.com/ClickHouse/ClickHouse/issues/34747). [#34765](https://github.com/ClickHouse/ClickHouse/pull/34765) ([李扬](https://github.com/taiyang-li)).
|
||||||
|
* Added support for specifying `content_type` in predefined and static HTTP handler config. [#34916](https://github.com/ClickHouse/ClickHouse/pull/34916) ([Roman Nikonov](https://github.com/nic11)).
|
||||||
|
* Implement partial GROUP BY key for optimize_aggregation_in_order. [#35111](https://github.com/ClickHouse/ClickHouse/pull/35111) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Nullables detection in protobuf using Google wrappers. [#35149](https://github.com/ClickHouse/ClickHouse/pull/35149) ([Jakub Kuklis](https://github.com/jkuklis)).
|
||||||
|
* If the required amount of memory is available before the selected query stopped, all waiting queries continue execution. Now we don't stop any query if memory is freed before the moment when the selected query knows about the cancellation. [#35637](https://github.com/ClickHouse/ClickHouse/pull/35637) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* Enable memory overcommit by default. [#35921](https://github.com/ClickHouse/ClickHouse/pull/35921) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* - Add branch to avoid unnecessary memcpy in readbig. [#36095](https://github.com/ClickHouse/ClickHouse/pull/36095) ([jasperzhu](https://github.com/jinjunzh)).
|
||||||
|
* Refactor code around schema inference with globs. Try next file from glob only if it makes sense (previously we tried next file in case of any error). Also it fixes [#36317](https://github.com/ClickHouse/ClickHouse/issues/36317). [#36205](https://github.com/ClickHouse/ClickHouse/pull/36205) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Improve schema inference for json objects. [#36207](https://github.com/ClickHouse/ClickHouse/pull/36207) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Add support for force recovery which allows you to reconfigure cluster without quorum. [#36258](https://github.com/ClickHouse/ClickHouse/pull/36258) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* We create a local interpreter if we want to execute query on localhost replica. But for when executing query on multiple replicas we rely on the fact that a connection exists so replicas can talk to coordinator. It is now improved and localhost replica can talk to coordinator directly in the same process. [#36281](https://github.com/ClickHouse/ClickHouse/pull/36281) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Show names of erroneous files in case of parsing errors while executing table functions `file`, `s3` and `url`. [#36314](https://github.com/ClickHouse/ClickHouse/pull/36314) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Allowed to increase the number of threads for executing background operations (merges, mutations, moves and fetches) at runtime if they are specified at top level config. [#36425](https://github.com/ClickHouse/ClickHouse/pull/36425) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* clickhouse-benchmark can read auth from environment variables. [#36497](https://github.com/ClickHouse/ClickHouse/pull/36497) ([Anton Kozlov](https://github.com/tonickkozlov)).
|
||||||
|
* Allow names of tuple elements that start from digits. [#36544](https://github.com/ClickHouse/ClickHouse/pull/36544) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Allow file descriptors in table function file if it is run in clickhouse-local. [#36562](https://github.com/ClickHouse/ClickHouse/pull/36562) ([wuxiaobai24](https://github.com/wuxiaobai24)).
|
||||||
|
* Allow to cast columns of type `Object(...)` to `Object(Nullable(...))`. [#36564](https://github.com/ClickHouse/ClickHouse/pull/36564) ([awakeljw](https://github.com/awakeljw)).
|
||||||
|
* Cleanup CSS in Play UI. The pixels are more evenly placed. Better usability for long content in table cells. [#36569](https://github.com/ClickHouse/ClickHouse/pull/36569) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* The metrics about time spent reading from s3 now calculated correctly. Close [#35483](https://github.com/ClickHouse/ClickHouse/issues/35483). [#36572](https://github.com/ClickHouse/ClickHouse/pull/36572) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Improve `SYSTEM DROP FILESYSTEM CACHE` query: `<path>` option and `FORCE` option. [#36639](https://github.com/ClickHouse/ClickHouse/pull/36639) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Add `is_all_data_sent` column into `system.processes`, and improve internal testing hardening check based on it. [#36649](https://github.com/ClickHouse/ClickHouse/pull/36649) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Now date time conversion functions that generate time before 1970-01-01 00:00:00 with partial hours/minutes timezones will be saturated to zero instead of overflow. This is the continuation of https://github.com/ClickHouse/ClickHouse/pull/29953 which addresses https://github.com/ClickHouse/ClickHouse/pull/29953#discussion_r800550280 . Mark as improvement because it's implementation defined behavior (and very rare case) and we are allowed to break it. [#36656](https://github.com/ClickHouse/ClickHouse/pull/36656) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Allow to cancel query while still keep decent query id in MySQLHandler. [#36699](https://github.com/ClickHouse/ClickHouse/pull/36699) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Properly cancel INSERT queries in `clickhouse-client`/`clickhouse-local`. [#36710](https://github.com/ClickHouse/ClickHouse/pull/36710) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Allow cluster macro in s3Cluster table function. [#36726](https://github.com/ClickHouse/ClickHouse/pull/36726) ([Vadim Volodin](https://github.com/PolyProgrammist)).
|
||||||
|
* Added `user_defined_path` config setting. [#36753](https://github.com/ClickHouse/ClickHouse/pull/36753) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Allow to execute hash functions with arguments of type `Array(Tuple(..))`. [#36812](https://github.com/ClickHouse/ClickHouse/pull/36812) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Add warning if someone running clickhouse-server with log level "test". The log level "test" was added recently and cannot be used in production due to inevitable, unavoidable, fatal and life-threatening performance degradation. [#36824](https://github.com/ClickHouse/ClickHouse/pull/36824) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Play UI: If there is one row in result and more than a few columns, display the result vertically. Continuation of [#36811](https://github.com/ClickHouse/ClickHouse/issues/36811). [#36842](https://github.com/ClickHouse/ClickHouse/pull/36842) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Add extra diagnostic info (if applicable) when sending exception to other server. [#36872](https://github.com/ClickHouse/ClickHouse/pull/36872) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* After [#36425](https://github.com/ClickHouse/ClickHouse/issues/36425) settings like `background_fetches_pool_size` became obsolete and can appear in top level config, but clickhouse throws an exception like `Error updating configuration from '/etc/clickhouse-server/config.xml' config.: Code: 137. DB::Exception: A setting 'background_fetches_pool_size' appeared at top level in config /etc/clickhouse-server/config.xml.` This is fixed. [#36917](https://github.com/ClickHouse/ClickHouse/pull/36917) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Finalize write buffers in case of exception to avoid doing it in destructors. Hope it fixes: [#36907](https://github.com/ClickHouse/ClickHouse/issues/36907). [#36979](https://github.com/ClickHouse/ClickHouse/pull/36979) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Play UI: Nullable numbers will be aligned to the right in table cells. This closes [#36982](https://github.com/ClickHouse/ClickHouse/issues/36982). [#36988](https://github.com/ClickHouse/ClickHouse/pull/36988) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Implemented a new mode of handling row policies which can be enabled in the main configuration which enables users without permissive row policies to read rows. [#36997](https://github.com/ClickHouse/ClickHouse/pull/36997) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||||
|
* Fix bug which can lead to forgotten outdated parts in MergeTree table engines family in case of filesystem failures during parts removal. Before fix they will be removed only after first server restart. [#37014](https://github.com/ClickHouse/ClickHouse/pull/37014) ([alesapin](https://github.com/alesapin)).
|
||||||
|
* Modify query div in play.html to be extendable beyond 200px height. In case of very long queries it is helpful to extend the textarea element, only today, since the div is fixed height, the extended textarea hides the data div underneath. With this fix, extending the textarea element will push the data div down/up such the extended textarea won't hide it. [#37051](https://github.com/ClickHouse/ClickHouse/pull/37051) ([guyco87](https://github.com/guyco87)).
|
||||||
|
* Better read from cache. [#37054](https://github.com/ClickHouse/ClickHouse/pull/37054) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix progress indication for `INSERT SELECT` in clickhouse-local for any query and for file progress in client, more correct file progress. [#37075](https://github.com/ClickHouse/ClickHouse/pull/37075) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Disable `log_query_threads` setting by default. It controls the logging of statistics about every thread participating in query execution. After supporting asynchronous reads, the total number of distinct thread ids became too large, and logging into the `query_thread_log` has become too heavy. [#37077](https://github.com/ClickHouse/ClickHouse/pull/37077) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Option `compatibility_ignore_auto_increment_in_create_table` allows ignoring `AUTO_INCREMENT` keyword in a column declaration to simplify migration from MySQL. [#37178](https://github.com/ClickHouse/ClickHouse/pull/37178) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* Added implicit cast for `h3kRing` function second argument to improve usability. Closes [#35432](https://github.com/ClickHouse/ClickHouse/issues/35432). [#37189](https://github.com/ClickHouse/ClickHouse/pull/37189) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Limit the max partitions could be queried for each hive table. Avoid resource overruns. [#37281](https://github.com/ClickHouse/ClickHouse/pull/37281) ([lgbo](https://github.com/lgbo-ustc)).
|
||||||
|
|
||||||
|
#### Bug Fix
|
||||||
|
* Extracts Version ID if present from the URI and adds a request to the AWS HTTP URI. Closes [#31221](https://github.com/ClickHouse/ClickHouse/issues/31221). - [x] Extract `Version ID` from URI if present and reassemble without it. - [x] Configure `AWS HTTP URI` object with request. - [x] Unit Tests: [`gtest_s3_uri`](https://github.com/ClickHouse/ClickHouse/blob/2340a6c6849ebc05a8efbf97ba8de3ff9dc0eff4/src/IO/tests/gtest_s3_uri.cpp) - [x] Drop instrumentation commit. [#34571](https://github.com/ClickHouse/ClickHouse/pull/34571) ([Saad Ur Rahman](https://github.com/surahman)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Now `clickhouse-keeper` for the `x86_64` architecture is statically linked with [musl](https://musl.libc.org/) and doesn't depend on any system libraries. [#31833](https://github.com/ClickHouse/ClickHouse/pull/31833) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Fail performance comparison on errors in the report. [#34797](https://github.com/ClickHouse/ClickHouse/pull/34797) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Check out the most of build jobs with depth=1. [#36091](https://github.com/ClickHouse/ClickHouse/pull/36091) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Bump minizip-ng to a sane version, or else old git won't be able to address dangling remote ref. [#35656](https://github.com/ClickHouse/ClickHouse/issues/35656). [#36295](https://github.com/ClickHouse/ClickHouse/pull/36295) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Use consistent `force tests` label in CI. [#36496](https://github.com/ClickHouse/ClickHouse/pull/36496) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Limit PowerPC code generation to Power8 for better compatibility. This closes [#36025](https://github.com/ClickHouse/ClickHouse/issues/36025). [#36529](https://github.com/ClickHouse/ClickHouse/pull/36529) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* - More robust handling of unknown architectures in CMake. [#36614](https://github.com/ClickHouse/ClickHouse/pull/36614) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Simplify performance test. This will give a chance for us to use it. [#36769](https://github.com/ClickHouse/ClickHouse/pull/36769) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Fix checking for rabbitmq liveness in tests. Fixed incorrect import. [#36938](https://github.com/ClickHouse/ClickHouse/pull/36938) ([tchepavel](https://github.com/tchepavel)).
|
||||||
|
* ClickHouse builds for `PowerPC64LE` architecture are now available in universal installation script `curl https://clickhouse.com/ | sh` and by direct link `https://builds.clickhouse.com/master/powerpc64le/clickhouse`. [#37095](https://github.com/ClickHouse/ClickHouse/pull/37095) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* - Make cmake build scripts a bit more robust. [#37169](https://github.com/ClickHouse/ClickHouse/pull/37169) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* The ilike() function on FixedString columns could have returned wrong results (i.e. match less than it should). [#37117](https://github.com/ClickHouse/ClickHouse/pull/37117) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Fix implicit cast for optimize_skip_unused_shards_rewrite_in. [#37153](https://github.com/ClickHouse/ClickHouse/pull/37153) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Enable `enable_global_with_statement` for subqueries, close [#37141](https://github.com/ClickHouse/ClickHouse/issues/37141). [#37166](https://github.com/ClickHouse/ClickHouse/pull/37166) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Now WindowView `WATCH EVENTS` query will not be terminated due to the nonempty Chunk created in `WindowViewSource.h:58`. [#37182](https://github.com/ClickHouse/ClickHouse/pull/37182) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Fix "Cannot create column of type Set" for distributed queries with LIMIT BY. [#37193](https://github.com/ClickHouse/ClickHouse/pull/37193) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fix possible overflow during `OvercommitRatio` comparison. cc @tavplubix. [#37197](https://github.com/ClickHouse/ClickHouse/pull/37197) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* Update `max_fired_watermark ` after blocks **actually** fired, in case delete data that hasn't been fired yet. [#37225](https://github.com/ClickHouse/ClickHouse/pull/37225) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Kafka does not need `group.id` on producer stage. In console log you can find Warning that describe this issue: ``` 2022.05.15 17:59:13.270227 [ 137 ] {} <Warning> StorageKafka (topic-name): [rdk:CONFWARN] [thrd:app]: Configuration property group.id is a consumer property and will be ignored by this producer instance ```. [#37228](https://github.com/ClickHouse/ClickHouse/pull/37228) ([Mark Andreev](https://github.com/mrk-andreev)).
|
||||||
|
* fix MySQL database engine to compatible with binary(0) dataType. [#37232](https://github.com/ClickHouse/ClickHouse/pull/37232) ([zzsmdfj](https://github.com/zzsmdfj)).
|
||||||
|
* Fix execution of mutations in tables, in which there exist columns of type `Object`. Using subcolumns of type `Object` in `WHERE` expression of `UPDATE` or `DELETE` queries is not allowed yet, as well as manipulating (`DROP`, `MODIFY`) of separate subcolumns. Fixes [#37205](https://github.com/ClickHouse/ClickHouse/issues/37205). [#37266](https://github.com/ClickHouse/ClickHouse/pull/37266) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix Nullable(String) to Nullable(Bool/IPv4/IPv6) conversion Closes [#37221](https://github.com/ClickHouse/ClickHouse/issues/37221). [#37270](https://github.com/ClickHouse/ClickHouse/pull/37270) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fix system.opentelemetry_span_log attribute.values alias to values instead of keys. [#37275](https://github.com/ClickHouse/ClickHouse/pull/37275) ([Aleksandr Razumov](https://github.com/ernado)).
|
||||||
|
* Fix possible deadlock in OvercommitTracker during logging. cc @alesapin @tavplubix Fixes [#37272](https://github.com/ClickHouse/ClickHouse/issues/37272). [#37299](https://github.com/ClickHouse/ClickHouse/pull/37299) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehaviour in official stable or prestable release)
|
||||||
|
|
||||||
|
* - fix substring function range error length when `offset` and `length` is negative constant and `s` is not constant. [#33861](https://github.com/ClickHouse/ClickHouse/pull/33861) ([RogerYK](https://github.com/RogerYK)).
|
||||||
|
* Accidentally ZSTD support for Arrow was not being built. This fixes [#35283](https://github.com/ClickHouse/ClickHouse/issues/35283). [#35486](https://github.com/ClickHouse/ClickHouse/pull/35486) ([Sean Lafferty](https://github.com/seanlaff)).
|
||||||
|
* Fix ALTER DROP COLUMN of nested column with compact parts (i.e. `ALTER TABLE x DROP COLUMN n`, when there is column `n.d`). [#35797](https://github.com/ClickHouse/ClickHouse/pull/35797) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fix insertion of complex JSONs with nested arrays to columns of type `Object`. [#36077](https://github.com/ClickHouse/ClickHouse/pull/36077) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Queries with aliases inside special operators returned parsing error (was broken in 22.1). Example: `SELECT substring('test' AS t, 1, 1)`. [#36167](https://github.com/ClickHouse/ClickHouse/pull/36167) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* - Fix assertion in JOIN, close [#36199](https://github.com/ClickHouse/ClickHouse/issues/36199). [#36201](https://github.com/ClickHouse/ClickHouse/pull/36201) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Fix dictionary reload for `ClickHouseDictionarySource` if it contains scalar subqueries. [#36390](https://github.com/ClickHouse/ClickHouse/pull/36390) ([lthaooo](https://github.com/lthaooo)).
|
||||||
|
* Fix nullptr dereference in JOIN and COLUMNS matcher. This fixes [#36416](https://github.com/ClickHouse/ClickHouse/issues/36416) . This is for https://github.com/ClickHouse/ClickHouse/pull/36417. [#36430](https://github.com/ClickHouse/ClickHouse/pull/36430) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Fix bug in s3Cluster schema inference that led to the fact that not all data was read in the select from s3Cluster. The bug appeared in https://github.com/ClickHouse/ClickHouse/pull/35544. [#36434](https://github.com/ClickHouse/ClickHouse/pull/36434) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Server might fail to start if it cannot resolve hostname of external ClickHouse dictionary. It's fixed. Fixes [#36451](https://github.com/ClickHouse/ClickHouse/issues/36451). [#36463](https://github.com/ClickHouse/ClickHouse/pull/36463) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* This code segment can prove bug. ``` int main() { RangeGenerator g{1230, 100}; std::cout << g.totalRanges() << std::endl; int count = 0; while(g.nextRange()) ++count; std::cout << "count:" << count << std::endl; return 0; }. [#36469](https://github.com/ClickHouse/ClickHouse/pull/36469) ([李扬](https://github.com/taiyang-li)).
|
||||||
|
* Fix clickhouse-benchmark json report results. [#36473](https://github.com/ClickHouse/ClickHouse/pull/36473) ([Tian Xinhui](https://github.com/xinhuitian)).
|
||||||
|
* Add missing enum values in system.session_log table. Closes [#36474](https://github.com/ClickHouse/ClickHouse/issues/36474). [#36480](https://github.com/ClickHouse/ClickHouse/pull/36480) ([Memo](https://github.com/Joeywzr)).
|
||||||
|
* Fix possible exception with unknown packet from server in client. [#36481](https://github.com/ClickHouse/ClickHouse/pull/36481) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix usage of executable user defined functions in GROUP BY. Before executable user defined functions cannot be used as expressions in GROUP BY. Closes [#36448](https://github.com/ClickHouse/ClickHouse/issues/36448). [#36486](https://github.com/ClickHouse/ClickHouse/pull/36486) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* close [#33906](https://github.com/ClickHouse/ClickHouse/issues/33906). [#36489](https://github.com/ClickHouse/ClickHouse/pull/36489) ([awakeljw](https://github.com/awakeljw)).
|
||||||
|
* Fix hostname sanity checks for Keeper cluster configuration. Add `keeper_server.host_checks_enabled` config to enable/disable those checks. [#36492](https://github.com/ClickHouse/ClickHouse/pull/36492) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Fix offset update ReadBufferFromEncryptedFile, which could cause undefined behaviour. [#36493](https://github.com/ClickHouse/ClickHouse/pull/36493) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* - Fix potential error with literals in `WHERE` for join queries. Close [#36279](https://github.com/ClickHouse/ClickHouse/issues/36279). [#36542](https://github.com/ClickHouse/ClickHouse/pull/36542) ([Vladimir C](https://github.com/vdimir)).
|
||||||
|
* Fix `Missing column` exception which could happen while using `INTERPOLATE` with `ENGINE = MergeTree` table. [#36549](https://github.com/ClickHouse/ClickHouse/pull/36549) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
|
* Fix format crash when default expression follow EPHEMERAL not literal. Closes [#36618](https://github.com/ClickHouse/ClickHouse/issues/36618). [#36633](https://github.com/ClickHouse/ClickHouse/pull/36633) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Fix merges of wide parts with type `Object`. [#36637](https://github.com/ClickHouse/ClickHouse/pull/36637) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fixed parsing of query settings in `CREATE` query when engine is not specified. Fixes https://github.com/ClickHouse/ClickHouse/pull/34187#issuecomment-1103812419. [#36642](https://github.com/ClickHouse/ClickHouse/pull/36642) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Fix possible heap-use-after-free in schema inference. Closes [#36661](https://github.com/ClickHouse/ClickHouse/issues/36661). [#36679](https://github.com/ClickHouse/ClickHouse/pull/36679) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fix server restart if cache configuration changed. [#36685](https://github.com/ClickHouse/ClickHouse/pull/36685) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* In the previous [PR](https://github.com/ClickHouse/ClickHouse/pull/36376), I found that testing **(stateless tests, flaky check (address, actions))** is timeout. Moreover, testing locally can also trigger unstable system deadlocks. This problem still exists when using the latest source code of master. [#36697](https://github.com/ClickHouse/ClickHouse/pull/36697) ([Han Shukai](https://github.com/KinderRiven)).
|
||||||
|
* Fix server reload on port change (do not wait for current connections from query context). [#36700](https://github.com/ClickHouse/ClickHouse/pull/36700) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fix vertical merges in wide parts. Previously an exception `There is no column` can be thrown during merge. [#36707](https://github.com/ClickHouse/ClickHouse/pull/36707) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* During the [test](https://s3.amazonaws.com/clickhouse-test-reports/36376/1cb1c7275cb53769ab826772db9b71361bb3e413/stress_test__thread__actions_/clickhouse-server.clean.log) in [PR](https://github.com/ClickHouse/ClickHouse/pull/36376), I found that the one cache class was initialized twice, it throws a exception. Although the cause of this problem is not clear, there should be code logic of repeatedly loading disk in ClickHouse, so we need to make special judgment for this situation. [#36737](https://github.com/ClickHouse/ClickHouse/pull/36737) ([Han Shukai](https://github.com/KinderRiven)).
|
||||||
|
* Fix a bug of `groupBitmapAndState`/`groupBitmapOrState`/`groupBitmapXorState` on distributed table. [#36739](https://github.com/ClickHouse/ClickHouse/pull/36739) ([Zhang Yifan](https://github.com/zhangyifan27)).
|
||||||
|
* Fix timeouts in Hedged requests. Connection hang right after sending remote query could lead to eternal waiting. [#36749](https://github.com/ClickHouse/ClickHouse/pull/36749) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fix insertion to columns of type `Object` from multiple files, e.g. via table function `file` with globs. [#36762](https://github.com/ClickHouse/ClickHouse/pull/36762) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix some issues with async reads from remote filesystem which happened when reading low cardinality. [#36763](https://github.com/ClickHouse/ClickHouse/pull/36763) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix creation of tables with `flatten_nested = 0`. Previously unflattened `Nested` columns could be flattened after server restart. [#36803](https://github.com/ClickHouse/ClickHouse/pull/36803) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix incorrect cast in cached buffer from remote fs. [#36809](https://github.com/ClickHouse/ClickHouse/pull/36809) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Remove function `groupArraySorted` which has a bug. [#36822](https://github.com/ClickHouse/ClickHouse/pull/36822) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Fix fire in window view with hop window [#34044](https://github.com/ClickHouse/ClickHouse/issues/34044). [#36861](https://github.com/ClickHouse/ClickHouse/pull/36861) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Fix `current_size` count in cache. [#36887](https://github.com/ClickHouse/ClickHouse/pull/36887) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix incorrect query result when doing constant aggregation. This fixes [#36728](https://github.com/ClickHouse/ClickHouse/issues/36728) . [#36888](https://github.com/ClickHouse/ClickHouse/pull/36888) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Fix bug in clickhouse-keeper which can lead to corrupted compressed log files in case of small load and restarts. [#36910](https://github.com/ClickHouse/ClickHouse/pull/36910) ([alesapin](https://github.com/alesapin)).
|
||||||
|
* Fix bugs when using multiple columns in WindowView by adding converting actions to make it possible to call`writeIntoWindowView` with a slightly different schema. [#36928](https://github.com/ClickHouse/ClickHouse/pull/36928) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Fix issue: [#36671](https://github.com/ClickHouse/ClickHouse/issues/36671). [#36929](https://github.com/ClickHouse/ClickHouse/pull/36929) ([李扬](https://github.com/taiyang-li)).
|
||||||
|
* Fix stuck when dropping source table in WindowView. Closes [#35678](https://github.com/ClickHouse/ClickHouse/issues/35678). [#36967](https://github.com/ClickHouse/ClickHouse/pull/36967) ([vxider](https://github.com/Vxider)).
|
||||||
|
* Fixed logical error on `TRUNCATE` query in `Replicated` database. Fixes [#33747](https://github.com/ClickHouse/ClickHouse/issues/33747). [#36976](https://github.com/ClickHouse/ClickHouse/pull/36976) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Fix sending external tables data in HedgedConnections with max_parallel_replicas != 1. [#36981](https://github.com/ClickHouse/ClickHouse/pull/36981) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fixed problem with infs in `quantileTDigest`. Fixes [#32107](https://github.com/ClickHouse/ClickHouse/issues/32107). [#37021](https://github.com/ClickHouse/ClickHouse/pull/37021) ([Vladimir Chebotarev](https://github.com/excitoon)).
|
||||||
|
* Fix LowCardinality->ArrowDictionary invalid output when type of indexes is not UInt8. Closes [#36832](https://github.com/ClickHouse/ClickHouse/issues/36832). [#37043](https://github.com/ClickHouse/ClickHouse/pull/37043) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fix in-order `GROUP BY` (`optimize_aggregation_in_order=1`) with `*Array` (`groupArrayArray`/...) aggregate functions. [#37046](https://github.com/ClickHouse/ClickHouse/pull/37046) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fixed performance degradation of some INSERT SELECT queries with implicit aggregation. Fixes [#36792](https://github.com/ClickHouse/ClickHouse/issues/36792). [#37047](https://github.com/ClickHouse/ClickHouse/pull/37047) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Fix optimize_aggregation_in_order with prefix GROUP BY and *Array aggregate functions. [#37050](https://github.com/ClickHouse/ClickHouse/pull/37050) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
|
||||||
|
#### NO CL ENTRY
|
||||||
|
|
||||||
|
* NO CL ENTRY: 'Revert "Minor refactor to prefer C++ Standard Algorithms"'. [#36511](https://github.com/ClickHouse/ClickHouse/pull/36511) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* NO CL ENTRY: 'Revert "Strict taskstats parser"'. [#36591](https://github.com/ClickHouse/ClickHouse/pull/36591) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* NO CL ENTRY: 'Revert "Translate docs/zh/sql-reference/data-types/map.md"'. [#36594](https://github.com/ClickHouse/ClickHouse/pull/36594) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* NO CL ENTRY: 'Revert "Update setting.md"'. [#36595](https://github.com/ClickHouse/ClickHouse/pull/36595) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* NO CL ENTRY: 'Documentation: Add a missing **ESTIMATE** in explain syntax'. [#36717](https://github.com/ClickHouse/ClickHouse/pull/36717) ([小蝌蚪](https://github.com/kayhaw)).
|
||||||
|
* NO CL ENTRY: '[Snyk] Security upgrade numpy from 1.16.6 to 1.22.2'. [#36729](https://github.com/ClickHouse/ClickHouse/pull/36729) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* NO CL ENTRY: 'Translate playground.md to Chinese'. [#36821](https://github.com/ClickHouse/ClickHouse/pull/36821) ([小蝌蚪](https://github.com/kayhaw)).
|
||||||
|
* NO CL ENTRY: 'Revert "Memory overcommit: continue query execution if memory is available"'. [#36858](https://github.com/ClickHouse/ClickHouse/pull/36858) ([alesapin](https://github.com/alesapin)).
|
||||||
|
* NO CL ENTRY: 'Revert "Revert "Memory overcommit: continue query execution if memory is available""'. [#36859](https://github.com/ClickHouse/ClickHouse/pull/36859) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* NO CL ENTRY: 'Revert "BLAKE3 hash function documentation"'. [#37092](https://github.com/ClickHouse/ClickHouse/pull/37092) ([Rich Raposa](https://github.com/rfraposa)).
|
||||||
|
* NO CL ENTRY: 'Revert "Remove height restrictions from the query div in play web tool."'. [#37261](https://github.com/ClickHouse/ClickHouse/pull/37261) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
@ -54,6 +54,7 @@ To build using Homebrew's vanilla Clang compiler (the only **recommended** way):
|
|||||||
``` bash
|
``` bash
|
||||||
cd ClickHouse
|
cd ClickHouse
|
||||||
mkdir build
|
mkdir build
|
||||||
|
export PATH=$(brew --prefix llvm)/bin:$PATH
|
||||||
export CC=$(brew --prefix llvm)/bin/clang
|
export CC=$(brew --prefix llvm)/bin/clang
|
||||||
export CXX=$(brew --prefix llvm)/bin/clang++
|
export CXX=$(brew --prefix llvm)/bin/clang++
|
||||||
cmake -G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -S . -B build
|
cmake -G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -S . -B build
|
||||||
@ -79,6 +80,8 @@ To build using Homebrew's vanilla GCC compiler (this option is only for developm
|
|||||||
``` bash
|
``` bash
|
||||||
cd ClickHouse
|
cd ClickHouse
|
||||||
mkdir build
|
mkdir build
|
||||||
|
export PATH=$(brew --prefix binutils)/bin:$PATH
|
||||||
|
export PATH=$(brew --prefix gcc)/bin:$PATH
|
||||||
export CC=$(brew --prefix gcc)/bin/gcc-11
|
export CC=$(brew --prefix gcc)/bin/gcc-11
|
||||||
export CXX=$(brew --prefix gcc)/bin/g++-11
|
export CXX=$(brew --prefix gcc)/bin/g++-11
|
||||||
cmake -G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -S . -B build
|
cmake -G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -S . -B build
|
||||||
|
@ -19,7 +19,7 @@ The following tutorial is based on the Ubuntu Linux system. With appropriate cha
|
|||||||
### Install Git, CMake, Python and Ninja {#install-git-cmake-python-and-ninja}
|
### Install Git, CMake, Python and Ninja {#install-git-cmake-python-and-ninja}
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ sudo apt-get install git cmake python ninja-build
|
sudo apt-get install git cmake python ninja-build
|
||||||
```
|
```
|
||||||
|
|
||||||
Or cmake3 instead of cmake on older systems.
|
Or cmake3 instead of cmake on older systems.
|
||||||
@ -37,8 +37,8 @@ For other Linux distribution - check the availability of the [prebuild packages]
|
|||||||
#### Use the latest clang for Builds
|
#### Use the latest clang for Builds
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ export CC=clang-14
|
export CC=clang-14
|
||||||
$ export CXX=clang++-14
|
export CXX=clang++-14
|
||||||
```
|
```
|
||||||
|
|
||||||
In this example we use version 14 that is the latest as of Feb 2022.
|
In this example we use version 14 that is the latest as of Feb 2022.
|
||||||
@ -48,23 +48,23 @@ Gcc can also be used though it is discouraged.
|
|||||||
### Checkout ClickHouse Sources {#checkout-clickhouse-sources}
|
### Checkout ClickHouse Sources {#checkout-clickhouse-sources}
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ git clone --recursive git@github.com:ClickHouse/ClickHouse.git
|
git clone --recursive git@github.com:ClickHouse/ClickHouse.git
|
||||||
```
|
```
|
||||||
|
|
||||||
or
|
or
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ git clone --recursive https://github.com/ClickHouse/ClickHouse.git
|
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
|
||||||
```
|
```
|
||||||
|
|
||||||
### Build ClickHouse {#build-clickhouse}
|
### Build ClickHouse {#build-clickhouse}
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ cd ClickHouse
|
cd ClickHouse
|
||||||
$ mkdir build
|
mkdir build
|
||||||
$ cd build
|
cd build
|
||||||
$ cmake ..
|
cmake ..
|
||||||
$ ninja
|
ninja
|
||||||
```
|
```
|
||||||
|
|
||||||
To create an executable, run `ninja clickhouse`.
|
To create an executable, run `ninja clickhouse`.
|
||||||
@ -114,13 +114,13 @@ make -j $(nproc)
|
|||||||
Here is an example of how to build `clang` and all the llvm infrastructure from sources:
|
Here is an example of how to build `clang` and all the llvm infrastructure from sources:
|
||||||
|
|
||||||
```
|
```
|
||||||
git clone git@github.com:llvm/llvm-project.git
|
git clone git@github.com:llvm/llvm-project.git
|
||||||
mkdir llvm-build && cd llvm-build
|
mkdir llvm-build && cd llvm-build
|
||||||
cmake -DCMAKE_BUILD_TYPE:STRING=Release -DLLVM_ENABLE_PROJECTS=all ../llvm-project/llvm/
|
cmake -DCMAKE_BUILD_TYPE:STRING=Release -DLLVM_ENABLE_PROJECTS=all ../llvm-project/llvm/
|
||||||
make -j16
|
make -j16
|
||||||
sudo make install
|
sudo make install
|
||||||
hash clang
|
hash clang
|
||||||
clang --version
|
clang --version
|
||||||
```
|
```
|
||||||
|
|
||||||
You can install the older clang like clang-11 from packages and then use it to build the new clang from sources.
|
You can install the older clang like clang-11 from packages and then use it to build the new clang from sources.
|
||||||
@ -140,21 +140,21 @@ hash cmake
|
|||||||
### Install Git {#install-git}
|
### Install Git {#install-git}
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ sudo apt-get update
|
sudo apt-get update
|
||||||
$ sudo apt-get install git python debhelper lsb-release fakeroot sudo debian-archive-keyring debian-keyring
|
sudo apt-get install git python debhelper lsb-release fakeroot sudo debian-archive-keyring debian-keyring
|
||||||
```
|
```
|
||||||
|
|
||||||
### Checkout ClickHouse Sources {#checkout-clickhouse-sources-1}
|
### Checkout ClickHouse Sources {#checkout-clickhouse-sources-1}
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ git clone --recursive --branch master https://github.com/ClickHouse/ClickHouse.git
|
git clone --recursive --branch master https://github.com/ClickHouse/ClickHouse.git
|
||||||
$ cd ClickHouse
|
cd ClickHouse
|
||||||
```
|
```
|
||||||
|
|
||||||
### Run Release Script {#run-release-script}
|
### Run Release Script {#run-release-script}
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ ./release
|
./release
|
||||||
```
|
```
|
||||||
|
|
||||||
## You Don’t Have to Build ClickHouse {#you-dont-have-to-build-clickhouse}
|
## You Don’t Have to Build ClickHouse {#you-dont-have-to-build-clickhouse}
|
||||||
|
@ -13,11 +13,6 @@ cmake .. \
|
|||||||
-DCMAKE_C_COMPILER=$(which clang-13) \
|
-DCMAKE_C_COMPILER=$(which clang-13) \
|
||||||
-DCMAKE_CXX_COMPILER=$(which clang++-13) \
|
-DCMAKE_CXX_COMPILER=$(which clang++-13) \
|
||||||
-DCMAKE_BUILD_TYPE=Debug \
|
-DCMAKE_BUILD_TYPE=Debug \
|
||||||
-DENABLE_CLICKHOUSE_ALL=OFF \
|
|
||||||
-DENABLE_CLICKHOUSE_SERVER=ON \
|
|
||||||
-DENABLE_CLICKHOUSE_CLIENT=ON \
|
|
||||||
-DENABLE_LIBRARIES=OFF \
|
|
||||||
-DUSE_UNWIND=ON \
|
|
||||||
-DENABLE_UTILS=OFF \
|
-DENABLE_UTILS=OFF \
|
||||||
-DENABLE_TESTS=OFF
|
-DENABLE_TESTS=OFF
|
||||||
```
|
```
|
||||||
@ -300,6 +295,12 @@ Note that ClickHouse uses forks of these libraries, see https://github.com/Click
|
|||||||
<td>Take care to add prlimit in command line before ccache, or else ccache thinks that prlimit is compiler, and clang++ is its input file, and refuses to work with multiple inputs, e.g in ccache log: [2021-03-31T18:06:32.655327 36900] Command line: /usr/bin/ccache prlimit --as=10000000000 --data=5000000000 --cpu=600 /usr/bin/clang++-11 - ...... std=gnu++2a -MD -MT src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o -MF src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o.d -o src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o -c ../src/Storages/MergeTree/IMergeTreeDataPart.cpp [2021-03-31T18:06:32.656704 36900] Multiple input files: /usr/bin/clang++-11 and ../src/Storages/MergeTree/IMergeTreeDataPart.cpp Another way would be to use --ccache-skip option before clang++-11 to make ccache ignore it.</td>
|
<td>Take care to add prlimit in command line before ccache, or else ccache thinks that prlimit is compiler, and clang++ is its input file, and refuses to work with multiple inputs, e.g in ccache log: [2021-03-31T18:06:32.655327 36900] Command line: /usr/bin/ccache prlimit --as=10000000000 --data=5000000000 --cpu=600 /usr/bin/clang++-11 - ...... std=gnu++2a -MD -MT src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o -MF src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o.d -o src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o -c ../src/Storages/MergeTree/IMergeTreeDataPart.cpp [2021-03-31T18:06:32.656704 36900] Multiple input files: /usr/bin/clang++-11 and ../src/Storages/MergeTree/IMergeTreeDataPart.cpp Another way would be to use --ccache-skip option before clang++-11 to make ccache ignore it.</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
|
<td><a name="enable-colored-build"></a><a href="https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L160" rel="external nofollow noreferrer" target="_blank"><code class="syntax">ENABLE_COLORED_BUILD</code></a></td>
|
||||||
|
<td><code class="syntax">ON</code></td>
|
||||||
|
<td>Enable colored diagnostics in build log.</td>
|
||||||
|
<td></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
<td><a name="enable-examples"></a><a href="https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L201" rel="external nofollow noreferrer" target="_blank"><code class="syntax">ENABLE_EXAMPLES</code></a></td>
|
<td><a name="enable-examples"></a><a href="https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L201" rel="external nofollow noreferrer" target="_blank"><code class="syntax">ENABLE_EXAMPLES</code></a></td>
|
||||||
<td><code class="syntax">OFF</code></td>
|
<td><code class="syntax">OFF</code></td>
|
||||||
<td>Build all example programs in 'examples' subdirectories</td>
|
<td>Build all example programs in 'examples' subdirectories</td>
|
||||||
@ -414,12 +415,6 @@ Note that ClickHouse uses forks of these libraries, see https://github.com/Click
|
|||||||
<td>Using system libs can cause a lot of warnings in includes (on macro expansion).</td>
|
<td>Using system libs can cause a lot of warnings in includes (on macro expansion).</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td><a name="weverything"></a><a href="https://github.com/clickhouse/clickhouse/blob/master/cmake/warnings.cmake#L15" rel="external nofollow noreferrer" target="_blank"><code class="syntax">WEVERYTHING</code></a></td>
|
|
||||||
<td><code class="syntax">ON</code></td>
|
|
||||||
<td>Enable -Weverything option with some exceptions.</td>
|
|
||||||
<td>Add some warnings that are not available even with -Wall -Wextra -Wpedantic. Intended for exploration of new compiler warnings that may be found useful. Applies to clang only</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><a name="with-coverage"></a><a href="https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L344" rel="external nofollow noreferrer" target="_blank"><code class="syntax">WITH_COVERAGE</code></a></td>
|
<td><a name="with-coverage"></a><a href="https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L344" rel="external nofollow noreferrer" target="_blank"><code class="syntax">WITH_COVERAGE</code></a></td>
|
||||||
<td><code class="syntax">OFF</code></td>
|
<td><code class="syntax">OFF</code></td>
|
||||||
<td>Profile the resulting binary/binaries</td>
|
<td>Profile the resulting binary/binaries</td>
|
||||||
|
@ -12,7 +12,7 @@ Functional tests are the most simple and convenient to use. Most of ClickHouse f
|
|||||||
|
|
||||||
Each functional test sends one or multiple queries to the running ClickHouse server and compares the result with reference.
|
Each functional test sends one or multiple queries to the running ClickHouse server and compares the result with reference.
|
||||||
|
|
||||||
Tests are located in `queries` directory. There are two subdirectories: `stateless` and `stateful`. Stateless tests run queries without any preloaded test data - they often create small synthetic datasets on the fly, within the test itself. Stateful tests require preloaded test data from CLickHouse and it is available to general public.
|
Tests are located in `queries` directory. There are two subdirectories: `stateless` and `stateful`. Stateless tests run queries without any preloaded test data - they often create small synthetic datasets on the fly, within the test itself. Stateful tests require preloaded test data from ClickHouse and it is available to general public.
|
||||||
|
|
||||||
Each test can be one of two types: `.sql` and `.sh`. `.sql` test is the simple SQL script that is piped to `clickhouse-client --multiquery`. `.sh` test is a script that is run by itself. SQL tests are generally preferable to `.sh` tests. You should use `.sh` tests only when you have to test some feature that cannot be exercised from pure SQL, such as piping some input data into `clickhouse-client` or testing `clickhouse-local`.
|
Each test can be one of two types: `.sql` and `.sh`. `.sql` test is the simple SQL script that is piped to `clickhouse-client --multiquery`. `.sh` test is a script that is run by itself. SQL tests are generally preferable to `.sh` tests. You should use `.sh` tests only when you have to test some feature that cannot be exercised from pure SQL, such as piping some input data into `clickhouse-client` or testing `clickhouse-local`.
|
||||||
|
|
||||||
|
@ -189,6 +189,8 @@ Example:
|
|||||||
- `_timestamp` — Timestamp of the message.
|
- `_timestamp` — Timestamp of the message.
|
||||||
- `_timestamp_ms` — Timestamp in milliseconds of the message.
|
- `_timestamp_ms` — Timestamp in milliseconds of the message.
|
||||||
- `_partition` — Partition of Kafka topic.
|
- `_partition` — Partition of Kafka topic.
|
||||||
|
- `_headers.name` — Array of message's headers keys.
|
||||||
|
- `_headers.value` — Array of message's headers values.
|
||||||
|
|
||||||
**See Also**
|
**See Also**
|
||||||
|
|
||||||
|
@ -669,6 +669,7 @@ Storage policies configuration markup:
|
|||||||
<volume_name_1>
|
<volume_name_1>
|
||||||
<disk>disk_name_from_disks_configuration</disk>
|
<disk>disk_name_from_disks_configuration</disk>
|
||||||
<max_data_part_size_bytes>1073741824</max_data_part_size_bytes>
|
<max_data_part_size_bytes>1073741824</max_data_part_size_bytes>
|
||||||
|
<load_balancing>round_robin</load_balancing>
|
||||||
</volume_name_1>
|
</volume_name_1>
|
||||||
<volume_name_2>
|
<volume_name_2>
|
||||||
<!-- configuration -->
|
<!-- configuration -->
|
||||||
@ -695,6 +696,8 @@ Tags:
|
|||||||
- `max_data_part_size_bytes` — the maximum size of a part that can be stored on any of the volume’s disks. If the a size of a merged part estimated to be bigger than `max_data_part_size_bytes` then this part will be written to a next volume. Basically this feature allows to keep new/small parts on a hot (SSD) volume and move them to a cold (HDD) volume when they reach large size. Do not use this setting if your policy has only one volume.
|
- `max_data_part_size_bytes` — the maximum size of a part that can be stored on any of the volume’s disks. If the a size of a merged part estimated to be bigger than `max_data_part_size_bytes` then this part will be written to a next volume. Basically this feature allows to keep new/small parts on a hot (SSD) volume and move them to a cold (HDD) volume when they reach large size. Do not use this setting if your policy has only one volume.
|
||||||
- `move_factor` — when the amount of available space gets lower than this factor, data automatically starts to move on the next volume if any (by default, 0.1). ClickHouse sorts existing parts by size from largest to smallest (in descending order) and selects parts with the total size that is sufficient to meet the `move_factor` condition. If the total size of all parts is insufficient, all parts will be moved.
|
- `move_factor` — when the amount of available space gets lower than this factor, data automatically starts to move on the next volume if any (by default, 0.1). ClickHouse sorts existing parts by size from largest to smallest (in descending order) and selects parts with the total size that is sufficient to meet the `move_factor` condition. If the total size of all parts is insufficient, all parts will be moved.
|
||||||
- `prefer_not_to_merge` — Disables merging of data parts on this volume. When this setting is enabled, merging data on this volume is not allowed. This allows controlling how ClickHouse works with slow disks.
|
- `prefer_not_to_merge` — Disables merging of data parts on this volume. When this setting is enabled, merging data on this volume is not allowed. This allows controlling how ClickHouse works with slow disks.
|
||||||
|
- `perform_ttl_move_on_insert` — Disables TTL move on data part INSERT. By default if we insert a data part that already expired by the TTL move rule it immediately goes to a volume/disk declared in move rule. This can significantly slowdown insert in case if destination volume/disk is slow (e.g. S3).
|
||||||
|
- `load_balancing` - Policy for disk balancing, `round_robin` or `least_used`.
|
||||||
|
|
||||||
Cofiguration examples:
|
Cofiguration examples:
|
||||||
|
|
||||||
@ -724,7 +727,7 @@ Cofiguration examples:
|
|||||||
<move_factor>0.2</move_factor>
|
<move_factor>0.2</move_factor>
|
||||||
</moving_from_ssd_to_hdd>
|
</moving_from_ssd_to_hdd>
|
||||||
|
|
||||||
<small_jbod_with_external_no_merges>
|
<small_jbod_with_external_no_merges>
|
||||||
<volumes>
|
<volumes>
|
||||||
<main>
|
<main>
|
||||||
<disk>jbod1</disk>
|
<disk>jbod1</disk>
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
---
|
---
|
||||||
sidebar_label: New York Taxi Data
|
sidebar_label: New York Taxi Data
|
||||||
|
sidebar_position: 2
|
||||||
description: Data for billions of taxi and for-hire vehicle (Uber, Lyft, etc.) trips originating in New York City since 2009
|
description: Data for billions of taxi and for-hire vehicle (Uber, Lyft, etc.) trips originating in New York City since 2009
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
---
|
---
|
||||||
sidebar_label: UK Property Price Paid
|
sidebar_label: UK Property Price Paid
|
||||||
|
sidebar_position: 1
|
||||||
---
|
---
|
||||||
|
|
||||||
# UK Property Price Paid
|
# UK Property Price Paid
|
||||||
|
@ -238,7 +238,7 @@ To start the server as a daemon, run:
|
|||||||
$ sudo clickhouse start
|
$ sudo clickhouse start
|
||||||
```
|
```
|
||||||
|
|
||||||
There are also another ways to run ClickHouse:
|
There are also other ways to run ClickHouse:
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
$ sudo service clickhouse-server start
|
$ sudo service clickhouse-server start
|
||||||
|
@ -31,8 +31,11 @@ The supported formats are:
|
|||||||
| [JSON](#json) | ✗ | ✔ |
|
| [JSON](#json) | ✗ | ✔ |
|
||||||
| [JSONAsString](#jsonasstring) | ✔ | ✗ |
|
| [JSONAsString](#jsonasstring) | ✔ | ✗ |
|
||||||
| [JSONStrings](#jsonstrings) | ✗ | ✔ |
|
| [JSONStrings](#jsonstrings) | ✗ | ✔ |
|
||||||
|
| [JSONColumns](#jsoncolumns) | ✔ | ✔ |
|
||||||
|
| [JSONColumnsWithMetadata](#jsoncolumnswithmetadata) | ✗ | ✔ |
|
||||||
| [JSONCompact](#jsoncompact) | ✗ | ✔ |
|
| [JSONCompact](#jsoncompact) | ✗ | ✔ |
|
||||||
| [JSONCompactStrings](#jsoncompactstrings) | ✗ | ✔ |
|
| [JSONCompactStrings](#jsoncompactstrings) | ✗ | ✔ |
|
||||||
|
| [JSONCompactColumns](#jsoncompactcolumns) | ✔ | ✔ |
|
||||||
| [JSONEachRow](#jsoneachrow) | ✔ | ✔ |
|
| [JSONEachRow](#jsoneachrow) | ✔ | ✔ |
|
||||||
| [JSONEachRowWithProgress](#jsoneachrowwithprogress) | ✗ | ✔ |
|
| [JSONEachRowWithProgress](#jsoneachrowwithprogress) | ✗ | ✔ |
|
||||||
| [JSONStringsEachRow](#jsonstringseachrow) | ✔ | ✔ |
|
| [JSONStringsEachRow](#jsonstringseachrow) | ✔ | ✔ |
|
||||||
@ -400,6 +403,8 @@ Both data output and parsing are supported in this format. For parsing, any orde
|
|||||||
|
|
||||||
Parsing allows the presence of the additional field `tskv` without the equal sign or a value. This field is ignored.
|
Parsing allows the presence of the additional field `tskv` without the equal sign or a value. This field is ignored.
|
||||||
|
|
||||||
|
During import, columns with unknown names will be skipped if setting [input_format_skip_unknown_fields](../operations/settings/settings.md#settings-input-format-skip-unknown-fields) is set to 1.
|
||||||
|
|
||||||
## CSV {#csv}
|
## CSV {#csv}
|
||||||
|
|
||||||
Comma Separated Values format ([RFC](https://tools.ietf.org/html/rfc4180)).
|
Comma Separated Values format ([RFC](https://tools.ietf.org/html/rfc4180)).
|
||||||
@ -459,15 +464,15 @@ SELECT SearchPhrase, count() AS c FROM test.hits GROUP BY SearchPhrase WITH TOTA
|
|||||||
"meta":
|
"meta":
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"name": "'hello'",
|
"name": "num",
|
||||||
|
"type": "Int32"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "str",
|
||||||
"type": "String"
|
"type": "String"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "multiply(42, number)",
|
"name": "arr",
|
||||||
"type": "UInt64"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "range(5)",
|
|
||||||
"type": "Array(UInt8)"
|
"type": "Array(UInt8)"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@ -475,25 +480,32 @@ SELECT SearchPhrase, count() AS c FROM test.hits GROUP BY SearchPhrase WITH TOTA
|
|||||||
"data":
|
"data":
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"'hello'": "hello",
|
"num": 42,
|
||||||
"multiply(42, number)": "0",
|
"str": "hello",
|
||||||
"range(5)": [0,1,2,3,4]
|
"arr": [0,1]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"'hello'": "hello",
|
"num": 43,
|
||||||
"multiply(42, number)": "42",
|
"str": "hello",
|
||||||
"range(5)": [0,1,2,3,4]
|
"arr": [0,1,2]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"'hello'": "hello",
|
"num": 44,
|
||||||
"multiply(42, number)": "84",
|
"str": "hello",
|
||||||
"range(5)": [0,1,2,3,4]
|
"arr": [0,1,2,3]
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
|
||||||
"rows": 3,
|
"rows": 3,
|
||||||
|
|
||||||
"rows_before_limit_at_least": 3
|
"rows_before_limit_at_least": 3,
|
||||||
|
|
||||||
|
"statistics":
|
||||||
|
{
|
||||||
|
"elapsed": 0.001137687,
|
||||||
|
"rows_read": 3,
|
||||||
|
"bytes_read": 24
|
||||||
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -528,15 +540,15 @@ Example:
|
|||||||
"meta":
|
"meta":
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"name": "'hello'",
|
"name": "num",
|
||||||
|
"type": "Int32"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "str",
|
||||||
"type": "String"
|
"type": "String"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "multiply(42, number)",
|
"name": "arr",
|
||||||
"type": "UInt64"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "range(5)",
|
|
||||||
"type": "Array(UInt8)"
|
"type": "Array(UInt8)"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@ -544,25 +556,95 @@ Example:
|
|||||||
"data":
|
"data":
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"'hello'": "hello",
|
"num": "42",
|
||||||
"multiply(42, number)": "0",
|
"str": "hello",
|
||||||
"range(5)": "[0,1,2,3,4]"
|
"arr": "[0,1]"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"'hello'": "hello",
|
"num": "43",
|
||||||
"multiply(42, number)": "42",
|
"str": "hello",
|
||||||
"range(5)": "[0,1,2,3,4]"
|
"arr": "[0,1,2]"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"'hello'": "hello",
|
"num": "44",
|
||||||
"multiply(42, number)": "84",
|
"str": "hello",
|
||||||
"range(5)": "[0,1,2,3,4]"
|
"arr": "[0,1,2,3]"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
|
||||||
"rows": 3,
|
"rows": 3,
|
||||||
|
|
||||||
"rows_before_limit_at_least": 3
|
"rows_before_limit_at_least": 3,
|
||||||
|
|
||||||
|
"statistics":
|
||||||
|
{
|
||||||
|
"elapsed": 0.001403233,
|
||||||
|
"rows_read": 3,
|
||||||
|
"bytes_read": 24
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## JSONColumns {#jsoncolumns}
|
||||||
|
|
||||||
|
In this format, all data is represented as a single JSON Object.
|
||||||
|
Note that JSONColumns output format buffers all data in memory to output it as a single block and it can lead to high memory consumption.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"num": [42, 43, 44],
|
||||||
|
"str": ["hello", "hello", "hello"],
|
||||||
|
"arr": [[0,1], [0,1,2], [0,1,2,3]]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
During import, columns with unknown names will be skipped if setting [input_format_skip_unknown_fields](../operations/settings/settings.md#settings-input-format-skip-unknown-fields) is set to 1.
|
||||||
|
Columns that are not present in the block will be filled with default values (you can use [input_format_defaults_for_omitted_fields](../operations/settings/settings.md#session_settings-input_format_defaults_for_omitted_fields) setting here)
|
||||||
|
|
||||||
|
|
||||||
|
## JSONColumnsWithMetadata {#jsoncolumnsmonoblock}
|
||||||
|
|
||||||
|
Differs from JSONColumns output format in that it also outputs some metadata and statistics (similar to JSON output format).
|
||||||
|
This format buffers all data in memory and then outputs them as a single block, so, it can lead to high memory consumption.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"meta":
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"name": "num",
|
||||||
|
"type": "Int32"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "str",
|
||||||
|
"type": "String"
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"name": "arr",
|
||||||
|
"type": "Array(UInt8)"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"data":
|
||||||
|
{
|
||||||
|
"num": [42, 43, 44],
|
||||||
|
"str": ["hello", "hello", "hello"],
|
||||||
|
"arr": [[0,1], [0,1,2], [0,1,2,3]]
|
||||||
|
},
|
||||||
|
|
||||||
|
"rows": 3,
|
||||||
|
|
||||||
|
"rows_before_limit_at_least": 3,
|
||||||
|
|
||||||
|
"statistics":
|
||||||
|
{
|
||||||
|
"elapsed": 0.000272376,
|
||||||
|
"rows_read": 3,
|
||||||
|
"bytes_read": 24
|
||||||
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -618,71 +700,101 @@ Result:
|
|||||||
|
|
||||||
Differs from JSON only in that data rows are output in arrays, not in objects.
|
Differs from JSON only in that data rows are output in arrays, not in objects.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
1) JSONCompact:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"meta":
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"name": "num",
|
||||||
|
"type": "Int32"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "str",
|
||||||
|
"type": "String"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "arr",
|
||||||
|
"type": "Array(UInt8)"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"data":
|
||||||
|
[
|
||||||
|
[42, "hello", [0,1]],
|
||||||
|
[43, "hello", [0,1,2]],
|
||||||
|
[44, "hello", [0,1,2,3]]
|
||||||
|
],
|
||||||
|
|
||||||
|
"rows": 3,
|
||||||
|
|
||||||
|
"rows_before_limit_at_least": 3,
|
||||||
|
|
||||||
|
"statistics":
|
||||||
|
{
|
||||||
|
"elapsed": 0.001222069,
|
||||||
|
"rows_read": 3,
|
||||||
|
"bytes_read": 24
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
2) JSONCompactStrings
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"meta":
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"name": "num",
|
||||||
|
"type": "Int32"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "str",
|
||||||
|
"type": "String"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "arr",
|
||||||
|
"type": "Array(UInt8)"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"data":
|
||||||
|
[
|
||||||
|
["42", "hello", "[0,1]"],
|
||||||
|
["43", "hello", "[0,1,2]"],
|
||||||
|
["44", "hello", "[0,1,2,3]"]
|
||||||
|
],
|
||||||
|
|
||||||
|
"rows": 3,
|
||||||
|
|
||||||
|
"rows_before_limit_at_least": 3,
|
||||||
|
|
||||||
|
"statistics":
|
||||||
|
{
|
||||||
|
"elapsed": 0.001572097,
|
||||||
|
"rows_read": 3,
|
||||||
|
"bytes_read": 24
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## JSONCompactColumns {#jsoncompactcolumns}
|
||||||
|
|
||||||
|
In this format, all data is represented as a single JSON Array.
|
||||||
|
Note that JSONCompactColumns output format buffers all data in memory to output it as a single block and it can lead to high memory consumption.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
```json
|
||||||
```
|
[
|
||||||
// JSONCompact
|
[42, 43, 44],
|
||||||
{
|
["hello", "hello", "hello"],
|
||||||
"meta":
|
[[0,1], [0,1,2], [0,1,2,3]]
|
||||||
[
|
]
|
||||||
{
|
|
||||||
"name": "'hello'",
|
|
||||||
"type": "String"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "multiply(42, number)",
|
|
||||||
"type": "UInt64"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "range(5)",
|
|
||||||
"type": "Array(UInt8)"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
|
|
||||||
"data":
|
|
||||||
[
|
|
||||||
["hello", "0", [0,1,2,3,4]],
|
|
||||||
["hello", "42", [0,1,2,3,4]],
|
|
||||||
["hello", "84", [0,1,2,3,4]]
|
|
||||||
],
|
|
||||||
|
|
||||||
"rows": 3,
|
|
||||||
|
|
||||||
"rows_before_limit_at_least": 3
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
```
|
Columns that are not present in the block will be filled with default values (you can use [input_format_defaults_for_omitted_fields](../operations/settings/settings.md#session_settings-input_format_defaults_for_omitted_fields) setting here)
|
||||||
// JSONCompactStrings
|
|
||||||
{
|
|
||||||
"meta":
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"name": "'hello'",
|
|
||||||
"type": "String"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "multiply(42, number)",
|
|
||||||
"type": "UInt64"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "range(5)",
|
|
||||||
"type": "Array(UInt8)"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
|
|
||||||
"data":
|
|
||||||
[
|
|
||||||
["hello", "0", "[0,1,2,3,4]"],
|
|
||||||
["hello", "42", "[0,1,2,3,4]"],
|
|
||||||
["hello", "84", "[0,1,2,3,4]"]
|
|
||||||
],
|
|
||||||
|
|
||||||
"rows": 3,
|
|
||||||
|
|
||||||
"rows_before_limit_at_least": 3
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## JSONEachRow {#jsoneachrow}
|
## JSONEachRow {#jsoneachrow}
|
||||||
## JSONStringsEachRow {#jsonstringseachrow}
|
## JSONStringsEachRow {#jsonstringseachrow}
|
||||||
@ -699,15 +811,17 @@ When using these formats, ClickHouse outputs rows as separated, newline-delimite
|
|||||||
|
|
||||||
When inserting the data, you should provide a separate JSON value for each row.
|
When inserting the data, you should provide a separate JSON value for each row.
|
||||||
|
|
||||||
|
In JSONEachRow/JSONStringsEachRow input formats columns with unknown names will be skipped if setting [input_format_skip_unknown_fields](../operations/settings/settings.md#settings-input-format-skip-unknown-fields) is set to 1.
|
||||||
|
|
||||||
## JSONEachRowWithProgress {#jsoneachrowwithprogress}
|
## JSONEachRowWithProgress {#jsoneachrowwithprogress}
|
||||||
## JSONStringsEachRowWithProgress {#jsonstringseachrowwithprogress}
|
## JSONStringsEachRowWithProgress {#jsonstringseachrowwithprogress}
|
||||||
|
|
||||||
Differs from `JSONEachRow`/`JSONStringsEachRow` in that ClickHouse will also yield progress information as JSON values.
|
Differs from `JSONEachRow`/`JSONStringsEachRow` in that ClickHouse will also yield progress information as JSON values.
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{"row":{"'hello'":"hello","multiply(42, number)":"0","range(5)":[0,1,2,3,4]}}
|
{"row":{"num":42,"str":"hello","arr":[0,1]}}
|
||||||
{"row":{"'hello'":"hello","multiply(42, number)":"42","range(5)":[0,1,2,3,4]}}
|
{"row":{"num":43,"str":"hello","arr":[0,1,2]}}
|
||||||
{"row":{"'hello'":"hello","multiply(42, number)":"84","range(5)":[0,1,2,3,4]}}
|
{"row":{"num":44,"str":"hello","arr":[0,1,2,3]}}
|
||||||
{"progress":{"read_rows":"3","read_bytes":"24","written_rows":"0","written_bytes":"0","total_rows_to_read":"3"}}
|
{"progress":{"read_rows":"3","read_bytes":"24","written_rows":"0","written_bytes":"0","total_rows_to_read":"3"}}
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -728,11 +842,11 @@ Differs from `JSONCompactStringsEachRow` in that in that it also prints the head
|
|||||||
Differs from `JSONCompactStringsEachRow` in that it also prints two header rows with column names and types, similar to [TabSeparatedWithNamesAndTypes](#tabseparatedwithnamesandtypes).
|
Differs from `JSONCompactStringsEachRow` in that it also prints two header rows with column names and types, similar to [TabSeparatedWithNamesAndTypes](#tabseparatedwithnamesandtypes).
|
||||||
|
|
||||||
```json
|
```json
|
||||||
["'hello'", "multiply(42, number)", "range(5)"]
|
["num", "str", "arr"]
|
||||||
["String", "UInt64", "Array(UInt8)"]
|
["Int32", "String", "Array(UInt8)"]
|
||||||
["hello", "0", [0,1,2,3,4]]
|
[42, "hello", [0,1]]
|
||||||
["hello", "42", [0,1,2,3,4]]
|
[43, "hello", [0,1,2]]
|
||||||
["hello", "84", [0,1,2,3,4]]
|
[44, "hello", [0,1,2,3]]
|
||||||
```
|
```
|
||||||
|
|
||||||
### Inserting Data {#inserting-data}
|
### Inserting Data {#inserting-data}
|
||||||
|
@ -29,7 +29,7 @@ To analyze the `trace_log` system table:
|
|||||||
|
|
||||||
- Use the `addressToLine`, `addressToLineWithInlines`, `addressToSymbol` and `demangle` [introspection functions](../../sql-reference/functions/introspection.md) to get function names and their positions in ClickHouse code. To get a profile for some query, you need to aggregate data from the `trace_log` table. You can aggregate data by individual functions or by the whole stack traces.
|
- Use the `addressToLine`, `addressToLineWithInlines`, `addressToSymbol` and `demangle` [introspection functions](../../sql-reference/functions/introspection.md) to get function names and their positions in ClickHouse code. To get a profile for some query, you need to aggregate data from the `trace_log` table. You can aggregate data by individual functions or by the whole stack traces.
|
||||||
|
|
||||||
If you need to visualize `trace_log` info, try [flamegraph](../../interfaces/third-party/gui/#clickhouse-flamegraph) and [speedscope](https://github.com/laplab/clickhouse-speedscope).
|
If you need to visualize `trace_log` info, try [flamegraph](../../interfaces/third-party/gui.md#clickhouse-flamegraph-clickhouse-flamegraph) and [speedscope](https://github.com/laplab/clickhouse-speedscope).
|
||||||
|
|
||||||
## Example {#example}
|
## Example {#example}
|
||||||
|
|
||||||
|
@ -12,11 +12,13 @@ Columns:
|
|||||||
|
|
||||||
- `name` ([String](../../sql-reference/data-types/string.md)) — Table name.
|
- `name` ([String](../../sql-reference/data-types/string.md)) — Table name.
|
||||||
|
|
||||||
|
- `uuid` ([UUID](../../sql-reference/data-types/uuid.md)) — Table uuid (Atomic database).
|
||||||
|
|
||||||
- `engine` ([String](../../sql-reference/data-types/string.md)) — Table engine name (without parameters).
|
- `engine` ([String](../../sql-reference/data-types/string.md)) — Table engine name (without parameters).
|
||||||
|
|
||||||
- `is_temporary` ([UInt8](../../sql-reference/data-types/int-uint.md)) - Flag that indicates whether the table is temporary.
|
- `is_temporary` ([UInt8](../../sql-reference/data-types/int-uint.md)) - Flag that indicates whether the table is temporary.
|
||||||
|
|
||||||
- `data_path` ([String](../../sql-reference/data-types/string.md)) - Path to the table data in the file system.
|
- `data_paths` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - Paths to the table data in the file systems.
|
||||||
|
|
||||||
- `metadata_path` ([String](../../sql-reference/data-types/string.md)) - Path to the table metadata in the file system.
|
- `metadata_path` ([String](../../sql-reference/data-types/string.md)) - Path to the table metadata in the file system.
|
||||||
|
|
||||||
@ -60,6 +62,14 @@ Columns:
|
|||||||
|
|
||||||
- `has_own_data` ([UInt8](../../sql-reference/data-types/int-uint.md)) — Flag that indicates whether the table itself stores some data on disk or only accesses some other source.
|
- `has_own_data` ([UInt8](../../sql-reference/data-types/int-uint.md)) — Flag that indicates whether the table itself stores some data on disk or only accesses some other source.
|
||||||
|
|
||||||
|
- `loading_dependencies_database` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - Database loading dependencies (list of objects which should be loaded before the current object).
|
||||||
|
|
||||||
|
- `loading_dependencies_table` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - Table loading dependencies (list of objects which should be loaded before the current object).
|
||||||
|
|
||||||
|
- `loading_dependent_database` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - Dependent loading database.
|
||||||
|
|
||||||
|
- `loading_dependent_table` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - Dependent loading table.
|
||||||
|
|
||||||
The `system.tables` table is used in `SHOW TABLES` query implementation.
|
The `system.tables` table is used in `SHOW TABLES` query implementation.
|
||||||
|
|
||||||
**Example**
|
**Example**
|
||||||
@ -95,6 +105,10 @@ lifetime_rows: ᴺᵁᴸᴸ
|
|||||||
lifetime_bytes: ᴺᵁᴸᴸ
|
lifetime_bytes: ᴺᵁᴸᴸ
|
||||||
comment:
|
comment:
|
||||||
has_own_data: 0
|
has_own_data: 0
|
||||||
|
loading_dependencies_database: []
|
||||||
|
loading_dependencies_table: []
|
||||||
|
loading_dependent_database: []
|
||||||
|
loading_dependent_table: []
|
||||||
|
|
||||||
Row 2:
|
Row 2:
|
||||||
──────
|
──────
|
||||||
@ -122,4 +136,8 @@ lifetime_rows: ᴺᵁᴸᴸ
|
|||||||
lifetime_bytes: ᴺᵁᴸᴸ
|
lifetime_bytes: ᴺᵁᴸᴸ
|
||||||
comment:
|
comment:
|
||||||
has_own_data: 0
|
has_own_data: 0
|
||||||
|
loading_dependencies_database: []
|
||||||
|
loading_dependencies_table: []
|
||||||
|
loading_dependent_database: []
|
||||||
|
loading_dependent_table: []
|
||||||
```
|
```
|
||||||
|
@ -12,3 +12,34 @@ Values can be added to the array in any (indeterminate) order.
|
|||||||
The second version (with the `max_size` parameter) limits the size of the resulting array to `max_size` elements. For example, `groupArray(1)(x)` is equivalent to `[any (x)]`.
|
The second version (with the `max_size` parameter) limits the size of the resulting array to `max_size` elements. For example, `groupArray(1)(x)` is equivalent to `[any (x)]`.
|
||||||
|
|
||||||
In some cases, you can still rely on the order of execution. This applies to cases when `SELECT` comes from a subquery that uses `ORDER BY`.
|
In some cases, you can still rely on the order of execution. This applies to cases when `SELECT` comes from a subquery that uses `ORDER BY`.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
``` text
|
||||||
|
SELECT * FROM default.ck;
|
||||||
|
|
||||||
|
┌─id─┬─name─────┐
|
||||||
|
│ 1 │ zhangsan │
|
||||||
|
│ 1 │ ᴺᵁᴸᴸ │
|
||||||
|
│ 1 │ lisi │
|
||||||
|
│ 2 │ wangwu │
|
||||||
|
└────┴──────────┘
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
select id, groupArray(10)(name) from default.ck group by id;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─id─┬─groupArray(10)(name)─┐
|
||||||
|
│ 1 │ ['zhangsan','lisi'] │
|
||||||
|
│ 2 │ ['wangwu'] │
|
||||||
|
└────┴──────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
As shown in the result above, the `groupArray` function removes ᴺᵁᴸᴸ values.
|
||||||
|
@ -3,10 +3,38 @@ sidebar_position: 43
|
|||||||
sidebar_label: Boolean
|
sidebar_label: Boolean
|
||||||
---
|
---
|
||||||
|
|
||||||
# Boolean Values {#boolean-values}
|
# Boolean Values bool (boolean) {#boolean-values}
|
||||||
|
|
||||||
Since https://github.com/ClickHouse/ClickHouse/commit/4076ae77b46794e73594a9f400200088ed1e7a6e , there be a separate type for boolean values.
|
Type `bool` is stored as UInt8. Possible values `true` (1), `false` (0).
|
||||||
|
|
||||||
For versions before that, there is no separate type for boolean values. Use UInt8 type, restricted to the values 0 or 1.
|
|
||||||
|
```sql
|
||||||
|
select true as col, toTypeName(col);
|
||||||
|
┌─col──┬─toTypeName(true)─┐
|
||||||
|
│ true │ Bool │
|
||||||
|
└──────┴──────────────────┘
|
||||||
|
|
||||||
|
select true == 1 as col, toTypeName(col);
|
||||||
|
┌─col─┬─toTypeName(equals(true, 1))─┐
|
||||||
|
│ 1 │ UInt8 │
|
||||||
|
└─────┴─────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE test_bool
|
||||||
|
(
|
||||||
|
`A` Int64,
|
||||||
|
`B` Bool
|
||||||
|
)
|
||||||
|
ENGINE = Memory;
|
||||||
|
|
||||||
|
INSERT INTO test_bool VALUES (1, true),(2,0);
|
||||||
|
|
||||||
|
SELECT * FROM test_bool;
|
||||||
|
┌─A─┬─B─────┐
|
||||||
|
│ 1 │ true │
|
||||||
|
│ 2 │ false │
|
||||||
|
└───┴───────┘
|
||||||
|
```
|
||||||
|
|
||||||
[Original article](https://clickhouse.com/docs/en/data_types/boolean/) <!--hide-->
|
[Original article](https://clickhouse.com/docs/en/data_types/boolean/) <!--hide-->
|
||||||
|
76
docs/en/sql-reference/data-types/json.md
Normal file
76
docs/en/sql-reference/data-types/json.md
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 54
|
||||||
|
sidebar_label: JSON
|
||||||
|
---
|
||||||
|
|
||||||
|
# JSON {#json-data-type}
|
||||||
|
|
||||||
|
Stores JavaScript Object Notation (JSON) documents in a single column.
|
||||||
|
|
||||||
|
`JSON` is an alias for `Object('json')`.
|
||||||
|
|
||||||
|
:::warning
|
||||||
|
The JSON data type is an experimental feature. To use it, set `allow_experimental_object_type = 1`.
|
||||||
|
:::
|
||||||
|
|
||||||
|
## Example {#usage-example}
|
||||||
|
|
||||||
|
**Example 1**
|
||||||
|
|
||||||
|
Creating a table with a `JSON` column and inserting data into it:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE json
|
||||||
|
(
|
||||||
|
o JSON
|
||||||
|
)
|
||||||
|
ENGINE = Memory
|
||||||
|
```
|
||||||
|
|
||||||
|
```sql
|
||||||
|
INSERT INTO json VALUES ('{"a": 1, "b": { "c": 2, "d": [1, 2, 3] }}')
|
||||||
|
```
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT o.a, o.b.c, o.b.d[3] FROM json
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─o.a─┬─o.b.c─┬─arrayElement(o.b.d, 3)─┐
|
||||||
|
│ 1 │ 2 │ 3 │
|
||||||
|
└─────┴───────┴────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**Example 2**
|
||||||
|
|
||||||
|
To be able to create an ordered `MergeTree` family table, the sorting key has to be extracted into its own column. For example, to insert a file of compressed HTTP access logs in JSON format:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE logs
|
||||||
|
(
|
||||||
|
timestamp DateTime,
|
||||||
|
message JSON
|
||||||
|
)
|
||||||
|
ENGINE = MergeTree
|
||||||
|
ORDER BY timestamp
|
||||||
|
```
|
||||||
|
|
||||||
|
```sql
|
||||||
|
INSERT INTO logs
|
||||||
|
SELECT parseDateTimeBestEffort(JSONExtractString(json, 'timestamp')), json
|
||||||
|
FROM file('access.json.gz', JSONAsString)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Displaying JSON columns
|
||||||
|
|
||||||
|
When displaying a `JSON` column ClickHouse only shows the field values by default (because internally, it is represented as a tuple). You can display the field names as well by setting `output_format_json_named_tuples_as_objects = 1`:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SET output_format_json_named_tuples_as_objects = 1
|
||||||
|
|
||||||
|
SELECT * FROM json FORMAT JSONEachRow
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
{"o":{"a":1,"b":{"c":2,"d":[1,2,3]}}}
|
||||||
|
```
|
@ -1026,4 +1026,119 @@ Result:
|
|||||||
│ 41162 │
|
│ 41162 │
|
||||||
└─────────────┘
|
└─────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## h3Line {#h3line}
|
||||||
|
|
||||||
|
Returns the line of indices between the two indices that are provided.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
h3Line(start,end)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameter**
|
||||||
|
|
||||||
|
- `start` — Hexagon index number that represents a starting point. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||||
|
- `end` — Hexagon index number that represents an ending point. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
Array of h3 indexes representing the line of indices between the two provided indices:
|
||||||
|
|
||||||
|
Type: [Array](../../../sql-reference/data-types/array.md)([UInt64](../../../sql-reference/data-types/int-uint.md)).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT h3Line(590080540275638271,590103561300344831) as indexes;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─indexes────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐
|
||||||
|
│ [590080540275638271,590080471556161535,590080883873021951,590106516237844479,590104385934065663,590103630019821567,590103561300344831] │
|
||||||
|
└────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## h3Distance {#h3distance}
|
||||||
|
|
||||||
|
Returns the distance in grid cells between the two indices that are provided.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
h3Distance(start,end)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameter**
|
||||||
|
|
||||||
|
- `start` — Hexagon index number that represents a starting point. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||||
|
- `end` — Hexagon index number that represents an ending point. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
- Number of grid cells.
|
||||||
|
|
||||||
|
Type: [Int64](../../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
Returns a negative number if finding the distance fails.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT h3Distance(590080540275638271,590103561300344831) as distance;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─distance─┐
|
||||||
|
│ 7 │
|
||||||
|
└──────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## h3HexRing {#h3hexring}
|
||||||
|
|
||||||
|
Returns the indexes of the hexagonal ring centered at the provided origin h3Index and length k.
|
||||||
|
|
||||||
|
Returns 0 if no pentagonal distortion was encountered.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
h3HexRing(index, k)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameter**
|
||||||
|
|
||||||
|
- `index` — Hexagon index number that represents the origin. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||||
|
- `k` — Distance. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Returned values**
|
||||||
|
|
||||||
|
- Array of H3 indexes.
|
||||||
|
|
||||||
|
Type: [Array](../../../sql-reference/data-types/array.md)([UInt64](../../../sql-reference/data-types/int-uint.md)).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT h3HexRing(590080540275638271, toUInt16(1)) AS hexRing;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─hexRing─────────────────────────────────────────────────────────────────────────────────────────────────────────────┐
|
||||||
|
│ [590080815153545215,590080471556161535,590080677714591743,590077585338138623,590077447899185151,590079509483487231] │
|
||||||
|
└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
[Original article](https://clickhouse.com/docs/en/sql-reference/functions/geo/h3) <!--hide-->
|
[Original article](https://clickhouse.com/docs/en/sql-reference/functions/geo/h3) <!--hide-->
|
||||||
|
@ -96,10 +96,14 @@ SELECT fuzzBits(materialize('abacaba'), 0.1)
|
|||||||
FROM numbers(3)
|
FROM numbers(3)
|
||||||
```
|
```
|
||||||
|
|
||||||
\`\`\` text
|
Result:
|
||||||
┌─fuzzBits(materialize(‘abacaba’), 0.1)─┐
|
|
||||||
│ abaaaja │
|
``` text
|
||||||
│ a\*cjab+ │
|
┌─fuzzBits(materialize('abacaba'), 0.1)─┐
|
||||||
│ aeca2A │
|
│ abaaaja │
|
||||||
└───────────────────────────────────────┘
|
│ a*cjab+ │
|
||||||
|
│ aeca2A │
|
||||||
|
└───────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -11,10 +11,16 @@ The functions for working with UUID are listed below.
|
|||||||
|
|
||||||
Generates the [UUID](../data-types/uuid.md) of [version 4](https://tools.ietf.org/html/rfc4122#section-4.4).
|
Generates the [UUID](../data-types/uuid.md) of [version 4](https://tools.ietf.org/html/rfc4122#section-4.4).
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
``` sql
|
``` sql
|
||||||
generateUUIDv4()
|
generateUUIDv4([x])
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**Arguments**
|
||||||
|
|
||||||
|
- `x` — [Expression](../../sql-reference/syntax.md#syntax-expressions) resulting in any of the [supported data types](../../sql-reference/data-types/index.md#data_types). The resulting value is discarded, but the expression itself is used for bypassing [common subexpression elimination](../../sql-reference/functions/index.md#common-subexpression-elimination) if the function is called multiple times in one query. Optional parameter.
|
||||||
|
|
||||||
**Returned value**
|
**Returned value**
|
||||||
|
|
||||||
The UUID type value.
|
The UUID type value.
|
||||||
@ -37,6 +43,15 @@ SELECT * FROM t_uuid
|
|||||||
└──────────────────────────────────────┘
|
└──────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**Usage example if it is needed to generate multiple values in one row**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT generateUUIDv4(1), generateUUIDv4(2)
|
||||||
|
┌─generateUUIDv4(1)────────────────────┬─generateUUIDv4(2)────────────────────┐
|
||||||
|
│ 2d49dc6e-ddce-4cd0-afb8-790956df54c1 │ 8abf8c13-7dea-4fdf-af3e-0e18767770e6 │
|
||||||
|
└──────────────────────────────────────┴──────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
## empty {#empty}
|
## empty {#empty}
|
||||||
|
|
||||||
Checks whether the input UUID is empty.
|
Checks whether the input UUID is empty.
|
||||||
|
@ -105,7 +105,7 @@ Example: `regionToCountry(toUInt32(213)) = 225` converts Moscow (213) to Russia
|
|||||||
Converts a region to a continent. In every other way, this function is the same as ‘regionToCity’.
|
Converts a region to a continent. In every other way, this function is the same as ‘regionToCity’.
|
||||||
Example: `regionToContinent(toUInt32(213)) = 10001` converts Moscow (213) to Eurasia (10001).
|
Example: `regionToContinent(toUInt32(213)) = 10001` converts Moscow (213) to Eurasia (10001).
|
||||||
|
|
||||||
### regionToTopContinent (#regiontotopcontinent) {#regiontotopcontinent-regiontotopcontinent}
|
### regionToTopContinent(id\[, geobase\]) {#regiontotopcontinentid-geobase}
|
||||||
|
|
||||||
Finds the highest continent in the hierarchy for the region.
|
Finds the highest continent in the hierarchy for the region.
|
||||||
|
|
||||||
|
@ -12,7 +12,7 @@ The following operations are available:
|
|||||||
|
|
||||||
- `ALTER TABLE [db].name DROP INDEX name` - Removes index description from tables metadata and deletes index files from disk.
|
- `ALTER TABLE [db].name DROP INDEX name` - Removes index description from tables metadata and deletes index files from disk.
|
||||||
|
|
||||||
- `ALTER TABLE [db.]table MATERIALIZE INDEX name IN PARTITION partition_name` - The query rebuilds the secondary index `name` in the partition `partition_name`. Implemented as a [mutation](../../../../sql-reference/statements/alter/index.md#mutations). To rebuild index over the whole data in the table you need to remove `IN PARTITION` from query.
|
- `ALTER TABLE [db.]table MATERIALIZE INDEX name [IN PARTITION partition_name]` - Rebuilds the secondary index `name` for the specified `partition_name`. Implemented as a [mutation](../../../../sql-reference/statements/alter/index.md#mutations). If `IN PARTITION` part is omitted then it rebuilds the index for the whole table data.
|
||||||
|
|
||||||
The first two commands are lightweight in a sense that they only change metadata or remove files.
|
The first two commands are lightweight in a sense that they only change metadata or remove files.
|
||||||
|
|
||||||
@ -20,4 +20,4 @@ Also, they are replicated, syncing indices metadata via ZooKeeper.
|
|||||||
|
|
||||||
:::note
|
:::note
|
||||||
Index manipulation is supported only for tables with [`*MergeTree`](../../../../engines/table-engines/mergetree-family/mergetree.md) engine (including [replicated](../../../../engines/table-engines/mergetree-family/replication.md) variants).
|
Index manipulation is supported only for tables with [`*MergeTree`](../../../../engines/table-engines/mergetree-family/mergetree.md) engine (including [replicated](../../../../engines/table-engines/mergetree-family/replication.md) variants).
|
||||||
:::
|
:::
|
||||||
|
@ -29,12 +29,14 @@ There are multiple ways of user identification:
|
|||||||
- `IDENTIFIED WITH no_password`
|
- `IDENTIFIED WITH no_password`
|
||||||
- `IDENTIFIED WITH plaintext_password BY 'qwerty'`
|
- `IDENTIFIED WITH plaintext_password BY 'qwerty'`
|
||||||
- `IDENTIFIED WITH sha256_password BY 'qwerty'` or `IDENTIFIED BY 'password'`
|
- `IDENTIFIED WITH sha256_password BY 'qwerty'` or `IDENTIFIED BY 'password'`
|
||||||
- `IDENTIFIED WITH sha256_hash BY 'hash'`
|
- `IDENTIFIED WITH sha256_hash BY 'hash'` or `IDENTIFIED WITH sha256_hash BY 'hash' SALT 'salt'`
|
||||||
- `IDENTIFIED WITH double_sha1_password BY 'qwerty'`
|
- `IDENTIFIED WITH double_sha1_password BY 'qwerty'`
|
||||||
- `IDENTIFIED WITH double_sha1_hash BY 'hash'`
|
- `IDENTIFIED WITH double_sha1_hash BY 'hash'`
|
||||||
- `IDENTIFIED WITH ldap SERVER 'server_name'`
|
- `IDENTIFIED WITH ldap SERVER 'server_name'`
|
||||||
- `IDENTIFIED WITH kerberos` or `IDENTIFIED WITH kerberos REALM 'realm'`
|
- `IDENTIFIED WITH kerberos` or `IDENTIFIED WITH kerberos REALM 'realm'`
|
||||||
|
|
||||||
|
For identification with sha256_hash using `SALT`, the hash must be calculated from the concatenation of 'password' and 'salt'.
|
||||||
|
|
||||||
## User Host {#user-host}
|
## User Host {#user-host}
|
||||||
|
|
||||||
User host is a host from which a connection to ClickHouse server could be established. The host can be specified in the `HOST` query section in the following ways:
|
User host is a host from which a connection to ClickHouse server could be established. The host can be specified in the `HOST` query section in the following ways:
|
||||||
|
@ -170,6 +170,7 @@ Hierarchy of privileges:
|
|||||||
- `SYSTEM FLUSH`
|
- `SYSTEM FLUSH`
|
||||||
- `SYSTEM FLUSH DISTRIBUTED`
|
- `SYSTEM FLUSH DISTRIBUTED`
|
||||||
- `SYSTEM FLUSH LOGS`
|
- `SYSTEM FLUSH LOGS`
|
||||||
|
- `CLUSTER` (see also `access_control_improvements.on_cluster_queries_require_cluster_grant` configuration directive)
|
||||||
- [INTROSPECTION](#grant-introspection)
|
- [INTROSPECTION](#grant-introspection)
|
||||||
- `addressToLine`
|
- `addressToLine`
|
||||||
- `addressToLineWithInlines`
|
- `addressToLineWithInlines`
|
||||||
|
@ -104,7 +104,7 @@ There are many nuances to processing `NULL`. For example, if at least one of the
|
|||||||
|
|
||||||
In queries, you can check `NULL` using the [IS NULL](../sql-reference/operators/index.md#operator-is-null) and [IS NOT NULL](../sql-reference/operators/index.md) operators and the related functions `isNull` and `isNotNull`.
|
In queries, you can check `NULL` using the [IS NULL](../sql-reference/operators/index.md#operator-is-null) and [IS NOT NULL](../sql-reference/operators/index.md) operators and the related functions `isNull` and `isNotNull`.
|
||||||
|
|
||||||
### Heredoc {#heredeoc}
|
### Heredoc {#heredoc}
|
||||||
|
|
||||||
A [heredoc](https://en.wikipedia.org/wiki/Here_document) is a way to define a string (often multiline), while maintaining the original formatting. A heredoc is defined as a custom string literal, placed between two `$` symbols, for example `$heredoc$`. A value between two heredocs is processed "as-is".
|
A [heredoc](https://en.wikipedia.org/wiki/Here_document) is a way to define a string (often multiline), while maintaining the original formatting. A heredoc is defined as a custom string literal, placed between two `$` symbols, for example `$heredoc$`. A value between two heredocs is processed "as-is".
|
||||||
|
|
||||||
|
@ -5,7 +5,7 @@ sidebar_position: 101
|
|||||||
|
|
||||||
# Можно ли использовать ClickHouse как базу данных временных рядов? {#can-i-use-clickhouse-as-a-time-series-database}
|
# Можно ли использовать ClickHouse как базу данных временных рядов? {#can-i-use-clickhouse-as-a-time-series-database}
|
||||||
|
|
||||||
ClickHouse — это универсальное решение для [OLAP](../../faq/general/olap.md) операций, в то время как существует много специализированных СУБД временных рядов. Однако [высокая скорость выполнения запросов](../../faq/general/why-clickhouse-is-so-fast.md) позволяет CLickHouse во многих случаях "побеждать" специализированные аналоги. В подтверждение этому есть много примеров с конкретными показателями производительности, так что мы не будем останавливаться на этом подробно. Лучше рассмотрим те возможности ClickHouse, которые стоит использовать.
|
ClickHouse — это универсальное решение для [OLAP](../../faq/general/olap.md) операций, в то время как существует много специализированных СУБД временных рядов. Однако [высокая скорость выполнения запросов](../../faq/general/why-clickhouse-is-so-fast.md) позволяет ClickHouse во многих случаях "побеждать" специализированные аналоги. В подтверждение этому есть много примеров с конкретными показателями производительности, так что мы не будем останавливаться на этом подробно. Лучше рассмотрим те возможности ClickHouse, которые стоит использовать.
|
||||||
|
|
||||||
Во-первых, есть **[специальные кодеки](../../sql-reference/statements/create/table.md#create-query-specialized-codecs)**, которые составляют типичные временные ряды. Это могут быть либо стандартные алгоритмы, такие как `DoubleDelta` или `Gorilla`, либо специфические для ClickHouse, например `T64`.
|
Во-первых, есть **[специальные кодеки](../../sql-reference/statements/create/table.md#create-query-specialized-codecs)**, которые составляют типичные временные ряды. Это могут быть либо стандартные алгоритмы, такие как `DoubleDelta` или `Gorilla`, либо специфические для ClickHouse, например `T64`.
|
||||||
|
|
||||||
|
@ -2432,7 +2432,7 @@ SELECT idx, i FROM null_in WHERE i IN (1, NULL) SETTINGS transform_null_in = 1;
|
|||||||
|
|
||||||
Разрешает или запрещает использование типа данных `LowCardinality` с форматом данных [Native](../../interfaces/formats.md#native).
|
Разрешает или запрещает использование типа данных `LowCardinality` с форматом данных [Native](../../interfaces/formats.md#native).
|
||||||
|
|
||||||
Если использование типа `LowCardinality` ограничено, сервер CLickHouse преобразует столбцы `LowCardinality` в обычные столбцы для запросов `SELECT`, а обычные столбцы - в столбцы `LowCardinality` для запросов `INSERT`.
|
Если использование типа `LowCardinality` ограничено, сервер ClickHouse преобразует столбцы `LowCardinality` в обычные столбцы для запросов `SELECT`, а обычные столбцы - в столбцы `LowCardinality` для запросов `INSERT`.
|
||||||
|
|
||||||
В основном настройка используется для сторонних клиентов, не поддерживающих тип данных `LowCardinality`.
|
В основном настройка используется для сторонних клиентов, не поддерживающих тип данных `LowCardinality`.
|
||||||
|
|
||||||
|
@ -12,11 +12,13 @@
|
|||||||
|
|
||||||
- `name` ([String](../../sql-reference/data-types/string.md)) — имя таблицы.
|
- `name` ([String](../../sql-reference/data-types/string.md)) — имя таблицы.
|
||||||
|
|
||||||
|
- `uuid` ([UUID](../../sql-reference/data-types/uuid.md)) — Uuid таблицы (Atomic database).
|
||||||
|
|
||||||
- `engine` ([String](../../sql-reference/data-types/string.md)) — движок таблицы (без параметров).
|
- `engine` ([String](../../sql-reference/data-types/string.md)) — движок таблицы (без параметров).
|
||||||
|
|
||||||
- `is_temporary` ([UInt8](../../sql-reference/data-types/int-uint.md)) — флаг, указывающий на то, временная это таблица или нет.
|
- `is_temporary` ([UInt8](../../sql-reference/data-types/int-uint.md)) — флаг, указывающий на то, временная это таблица или нет.
|
||||||
|
|
||||||
- `data_path` ([String](../../sql-reference/data-types/string.md)) — путь к данным таблицы в файловой системе.
|
- `data_paths` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) — пути к данным таблицы в файловых системах.
|
||||||
|
|
||||||
- `metadata_path` ([String](../../sql-reference/data-types/string.md)) — путь к табличным метаданным в файловой системе.
|
- `metadata_path` ([String](../../sql-reference/data-types/string.md)) — путь к табличным метаданным в файловой системе.
|
||||||
|
|
||||||
@ -60,6 +62,14 @@
|
|||||||
|
|
||||||
- `has_own_data` ([UInt8](../../sql-reference/data-types/int-uint.md)) — флаг, показывающий хранит ли таблица сама какие-то данные на диске или только обращается к какому-то другому источнику.
|
- `has_own_data` ([UInt8](../../sql-reference/data-types/int-uint.md)) — флаг, показывающий хранит ли таблица сама какие-то данные на диске или только обращается к какому-то другому источнику.
|
||||||
|
|
||||||
|
- `loading_dependencies_database` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - базы данных необходимые для загрузки объекта.
|
||||||
|
|
||||||
|
- `loading_dependencies_table` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - таблицы необходимые для загрузки объекта.
|
||||||
|
|
||||||
|
- `loading_dependent_database` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - базы данных, которым объект необходим для загрузки.
|
||||||
|
|
||||||
|
- `loading_dependent_table` ([Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md))) - таблицы, которым объект необходим для загрузки.
|
||||||
|
|
||||||
Таблица `system.tables` используется при выполнении запроса `SHOW TABLES`.
|
Таблица `system.tables` используется при выполнении запроса `SHOW TABLES`.
|
||||||
|
|
||||||
**Пример**
|
**Пример**
|
||||||
@ -95,6 +105,10 @@ lifetime_rows: ᴺᵁᴸᴸ
|
|||||||
lifetime_bytes: ᴺᵁᴸᴸ
|
lifetime_bytes: ᴺᵁᴸᴸ
|
||||||
comment:
|
comment:
|
||||||
has_own_data: 0
|
has_own_data: 0
|
||||||
|
loading_dependencies_database: []
|
||||||
|
loading_dependencies_table: []
|
||||||
|
loading_dependent_database: []
|
||||||
|
loading_dependent_table: []
|
||||||
|
|
||||||
Row 2:
|
Row 2:
|
||||||
──────
|
──────
|
||||||
@ -122,4 +136,8 @@ lifetime_rows: ᴺᵁᴸᴸ
|
|||||||
lifetime_bytes: ᴺᵁᴸᴸ
|
lifetime_bytes: ᴺᵁᴸᴸ
|
||||||
comment:
|
comment:
|
||||||
has_own_data: 0
|
has_own_data: 0
|
||||||
|
loading_dependencies_database: []
|
||||||
|
loading_dependencies_table: []
|
||||||
|
loading_dependent_database: []
|
||||||
|
loading_dependent_table: []
|
||||||
```
|
```
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user