Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-23 16:12:01 +00:00)

Commit 63562cbb20: Merge branch 'master' into properly_split_remote_proxy_http_https
.clang-tidy (47 lines changed)

@@ -5,6 +5,9 @@
 # a) the new check is not controversial (this includes many checks in readability-* and google-*) or
 # b) too noisy (checks with > 100 new warnings are considered noisy, this includes e.g. cppcoreguidelines-*).

+# TODO: Once clang(-tidy) 17 is the minimum, we can convert this list to YAML
+# See https://releases.llvm.org/17.0.1/tools/clang/tools/extra/docs/ReleaseNotes.html#improvements-to-clang-tidy
+
 # TODO Let clang-tidy check headers in further directories
 # --> HeaderFilterRegex: '^.*/(src|base|programs|utils)/.*(h|hpp)$'
 HeaderFilterRegex: '^.*/(base)/.*(h|hpp)$'
@@ -35,38 +38,9 @@ Checks: '*,
     -cert-oop54-cpp,
     -cert-oop57-cpp,

-    -clang-analyzer-optin.performance.Padding,
-    -clang-analyzer-optin.portability.UnixAPI,
-    -clang-analyzer-security.insecureAPI.bzero,
-    -clang-analyzer-security.insecureAPI.strcpy,
+    -clang-analyzer-unix.Malloc,

-    -cppcoreguidelines-avoid-c-arrays,
-    -cppcoreguidelines-avoid-const-or-ref-data-members,
-    -cppcoreguidelines-avoid-do-while,
-    -cppcoreguidelines-avoid-goto,
-    -cppcoreguidelines-avoid-magic-numbers,
-    -cppcoreguidelines-avoid-non-const-global-variables,
-    -cppcoreguidelines-explicit-virtual-functions,
-    -cppcoreguidelines-init-variables,
-    -cppcoreguidelines-interfaces-global-init,
-    -cppcoreguidelines-macro-usage,
-    -cppcoreguidelines-narrowing-conversions,
-    -cppcoreguidelines-no-malloc,
-    -cppcoreguidelines-non-private-member-variables-in-classes,
-    -cppcoreguidelines-owning-memory,
-    -cppcoreguidelines-prefer-member-initializer,
-    -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
-    -cppcoreguidelines-pro-bounds-constant-array-index,
-    -cppcoreguidelines-pro-bounds-pointer-arithmetic,
-    -cppcoreguidelines-pro-type-const-cast,
-    -cppcoreguidelines-pro-type-cstyle-cast,
-    -cppcoreguidelines-pro-type-member-init,
-    -cppcoreguidelines-pro-type-reinterpret-cast,
-    -cppcoreguidelines-pro-type-static-cast-downcast,
-    -cppcoreguidelines-pro-type-union-access,
-    -cppcoreguidelines-pro-type-vararg,
-    -cppcoreguidelines-slicing,
-    -cppcoreguidelines-special-member-functions,
+    -cppcoreguidelines-*, # impractical in a codebase as large as ClickHouse, also slow

     -darwin-*,

@@ -78,7 +52,6 @@ Checks: '*,
     -google-readability-function-size,
     -google-readability-namespace-comments,
     -google-readability-todo,
-    -google-upgrade-googletest-case,

     -hicpp-avoid-c-arrays,
     -hicpp-avoid-goto,
@@ -108,6 +81,7 @@ Checks: '*,
     -openmp-*,

     -misc-const-correctness,
+    -misc-include-cleaner, # useful but far too many occurrences
     -misc-no-recursion,
     -misc-non-private-member-variables-in-classes,
     -misc-confusable-identifiers, # useful but slooow
@@ -127,10 +101,12 @@ Checks: '*,

     -performance-inefficient-string-concatenation,
     -performance-no-int-to-ptr,
+    -performance-avoid-endl,
     -performance-unnecessary-value-param,

     -portability-simd-intrinsics,

+    -readability-avoid-unconditional-preprocessor-if,
     -readability-braces-around-statements,
     -readability-convert-member-functions-to-static,
     -readability-else-after-return,
@@ -154,6 +130,13 @@ Checks: '*,

 WarningsAsErrors: '*'

+ExtraArgs:
+# clang-tidy 17 started to complain (for unknown reasons) that various pragmas are unknown ("clang-diagnostic-unknown-pragmas").
+# This is technically a compiler error, not a clang-tidy error. We could litter the code base with more pragmas that suppress
+# this error but it is better to pass the following flag to the compiler:
+- '-Wno-unknown-pragmas'
+- '-Wno-unused-command-line-argument' # similar issue
+
 CheckOptions:
   readability-identifier-naming.ClassCase: CamelCase
   readability-identifier-naming.EnumCase: CamelCase
.github/workflows/backport_branches.yml (14 lines changed)
@@ -399,6 +399,13 @@ jobs:
           clear-repository: true
           submodules: true
           fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -441,6 +448,13 @@ jobs:
           clear-repository: true
           submodules: true
           fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
.github/workflows/docs_check.yml (5 lines changed)
@@ -13,12 +13,11 @@ on:  # yamllint disable-line rule:truthy
     branches:
       - master
     paths:
-      - 'CHANGELOG.md'
-      - 'README.md'
-      - 'SECURITY.md'
+      - '**.md'
       - 'docker/docs/**'
       - 'docs/**'
       - 'utils/check-style/aspell-ignore/**'
+      - 'tests/ci/docs_check.py'
 jobs:
   CheckLabels:
     runs-on: [self-hosted, style-checker]
.github/workflows/master.yml (14 lines changed)
@@ -581,6 +581,13 @@ jobs:
           clear-repository: true
           submodules: true
           fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -707,6 +714,13 @@ jobs:
           clear-repository: true
           submodules: true
           fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
.github/workflows/nightly.yml (4 lines changed)
@@ -82,8 +82,8 @@ jobs:
       SONAR_SCANNER_VERSION: 4.8.0.2856
       SONAR_SERVER_URL: "https://sonarcloud.io"
       BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed
-      CC: clang-16
-      CXX: clang++-16
+      CC: clang-17
+      CXX: clang++-17
     steps:
       - name: Check out repository code
         uses: ClickHouse/checkout@v1
.github/workflows/pull_request.yml (19 lines changed)
@@ -13,12 +13,11 @@ on:  # yamllint disable-line rule:truthy
     branches:
       - master
     paths-ignore:
-      - 'CHANGELOG.md'
-      - 'README.md'
-      - 'SECURITY.md'
+      - '**.md'
       - 'docker/docs/**'
       - 'docs/**'
       - 'utils/check-style/aspell-ignore/**'
+      - 'tests/ci/docs_check.py'
 ##########################################################################################
 ##################################### SMALL CHECKS #######################################
 ##########################################################################################
@@ -648,6 +647,13 @@ jobs:
         with:
           clear-repository: true
           submodules: true
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -771,6 +777,13 @@ jobs:
         with:
           clear-repository: true
           submodules: true
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
.github/workflows/release_branches.yml (14 lines changed)
@@ -456,6 +456,13 @@ jobs:
           clear-repository: true
           submodules: true
           fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
@@ -498,6 +505,13 @@ jobs:
           clear-repository: true
           submodules: true
           fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Apply sparse checkout for contrib # in order to check that it doesn't break build
+        run: |
+          rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
+          git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
+          "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
+          du -hs "$GITHUB_WORKSPACE/contrib" ||:
+          find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
       - name: Build
         run: |
           sudo rm -fr "$TEMP_PATH"
.gitmodules (3 lines changed)
@@ -257,6 +257,9 @@
 [submodule "contrib/corrosion"]
     path = contrib/corrosion
     url = https://github.com/corrosion-rs/corrosion
+[submodule "contrib/libssh"]
+    path = contrib/libssh
+    url = https://github.com/ClickHouse/libssh.git
 [submodule "contrib/morton-nd"]
     path = contrib/morton-nd
     url = https://github.com/morton-nd/morton-nd
CHANGELOG.md (186 lines changed)
@@ -1,4 +1,5 @@
 ### Table of Contents
+**[ClickHouse release v23.9, 2023-09-28](#239)**<br/>
 **[ClickHouse release v23.8 LTS, 2023-08-31](#238)**<br/>
 **[ClickHouse release v23.7, 2023-07-27](#237)**<br/>
 **[ClickHouse release v23.6, 2023-06-30](#236)**<br/>
@@ -11,6 +12,173 @@

# 2023 Changelog

### <a id="239"></a> ClickHouse release 23.9, 2023-09-28

#### Backward Incompatible Change
* Remove the `status_info` configuration option and dictionaries status from the default Prometheus handler. [#54090](https://github.com/ClickHouse/ClickHouse/pull/54090) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* The experimental parts metadata cache is removed from the codebase. [#54215](https://github.com/ClickHouse/ClickHouse/pull/54215) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Disable setting `input_format_json_try_infer_numbers_from_strings` by default, so we don't try to infer numbers from strings in JSON formats, to avoid possible parsing errors when sample data contains strings that look like a number. [#55099](https://github.com/ClickHouse/ClickHouse/pull/55099) ([Kruglov Pavel](https://github.com/Avogar)).
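A minimal sketch of what the new default means in practice, using `DESC format()` on an inline JSON sample; the inferred types in the comments are indicative and the column name is made up:

```sql
-- New default: a string that merely looks numeric stays a String.
SET input_format_json_try_infer_numbers_from_strings = 0;
DESC format(JSONEachRow, '{"value" : "123"}');   -- value  Nullable(String)

-- The previous behaviour can still be enabled explicitly.
SET input_format_json_try_infer_numbers_from_strings = 1;
DESC format(JSONEachRow, '{"value" : "123"}');   -- value  Nullable(Int64)
```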
#### New Feature
* Improve schema inference from JSON formats: 1) Now it's possible to infer named Tuples from JSON objects without the experimental JSON type under a setting `input_format_json_try_infer_named_tuples_from_objects` in JSON formats. Previously, without the experimental type JSON we could only infer JSON objects as Strings or Maps; now we can infer a named Tuple. The resulting Tuple type will contain all keys of objects that were read in the data sample during schema inference. It can be useful for reading structured JSON data without sparse objects. The setting is enabled by default. 2) Allow parsing a JSON array into a column with type String under setting `input_format_json_read_arrays_as_strings`. It can help reading arrays with values of different types. 3) Allow to use type String for JSON keys with unknown types (`null`/`[]`/`{}`) in sample data under setting `input_format_json_infer_incomplete_types_as_strings`. Now in JSON formats we can read any value into a String column and we can avoid getting the error `Cannot determine type for column 'column_name' by first 25000 rows of data, most likely this column contains only Nulls or empty Arrays/Maps` during schema inference by using type String for unknown types, so the data will be read successfully. [#54427](https://github.com/ClickHouse/ClickHouse/pull/54427) ([Kruglov Pavel](https://github.com/Avogar)).
* Added IO scheduling support for remote disks. Storage configuration for disk types `s3`, `s3_plain`, `hdfs` and `azure_blob_storage` can now contain `read_resource` and `write_resource` elements holding resource names. Scheduling policies for these resources can be configured in a separate server configuration section `resources`. Queries can be marked using setting `workload` and classified using server configuration section `workload_classifiers` to achieve diverse resource scheduling goals. More details in [the docs](https://clickhouse.com/docs/en/operations/workload-scheduling). [#47009](https://github.com/ClickHouse/ClickHouse/pull/47009) ([Sergei Trifonov](https://github.com/serxa)). Added "bandwidth_limit" IO scheduling node type. It allows you to specify `max_speed` and `max_burst` constraints on traffic passing though this node. [#54618](https://github.com/ClickHouse/ClickHouse/pull/54618) ([Sergei Trifonov](https://github.com/serxa)).
* Added new type of authentication based on SSH keys. It works only for the native TCP protocol. [#41109](https://github.com/ClickHouse/ClickHouse/pull/41109) ([George Gamezardashvili](https://github.com/InfJoker)).
* Added a new column `_block_number` for MergeTree tables. [#44532](https://github.com/ClickHouse/ClickHouse/issues/44532). [#47532](https://github.com/ClickHouse/ClickHouse/pull/47532) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Add `IF EMPTY` clause for `DROP TABLE` queries. [#48915](https://github.com/ClickHouse/ClickHouse/pull/48915) ([Pavel Novitskiy](https://github.com/pnovitskiy)).
* SQL functions `toString(datetime, timezone)` and `formatDateTime(datetime, format, timezone)` now support non-constant timezone arguments. [#53680](https://github.com/ClickHouse/ClickHouse/pull/53680) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add support for `ALTER TABLE MODIFY COMMENT`. Note: something similar was added by an external contributor a long time ago, but the feature did not work at all and only confused users. This closes [#36377](https://github.com/ClickHouse/ClickHouse/issues/36377). [#51304](https://github.com/ClickHouse/ClickHouse/pull/51304) ([Alexey Milovidov](https://github.com/alexey-milovidov)). Note: this command does not propagate between replicas, so the replicas of a table could have different comments.
* Added `GCD` a.k.a. "greatest common denominator" as a new data compression codec. The codec computes the GCD of all column values, and then divides each value by the GCD. The GCD codec is a data preparation codec (similar to Delta and DoubleDelta) and cannot be used stand-alone. It works with data integer, decimal and date/time type. A viable use case for the GCD codec are column values that change (increase/decrease) in multiples of the GCD, e.g. 24 - 28 - 16 - 24 - 8 - 24 (assuming GCD = 4). [#53149](https://github.com/ClickHouse/ClickHouse/pull/53149) ([Alexander Nam](https://github.com/seshWCS)).
* Two new type aliases `DECIMAL(P)` (as a shortcut for `DECIMAL(P, 0)`) and `DECIMAL` (as a shortcut for `DECIMAL(10, 0)`) were added. This makes ClickHouse more compatible with MySQL's SQL dialect; see the sketch after this list. [#53328](https://github.com/ClickHouse/ClickHouse/pull/53328) ([Val Doroshchuk](https://github.com/valbok)).
* Added a new system log table `backup_log` to track all `BACKUP` and `RESTORE` operations. [#53638](https://github.com/ClickHouse/ClickHouse/pull/53638) ([Victor Krasnov](https://github.com/sirvickr)).
* Added a format setting `output_format_markdown_escape_special_characters` (default: false). The setting controls whether special characters like `!`, `#`, `$` etc. are escaped (i.e. prefixed by a backslash) in the `Markdown` output format. [#53860](https://github.com/ClickHouse/ClickHouse/pull/53860) ([irenjj](https://github.com/irenjj)).
* Add function `decodeHTMLComponent`. [#54097](https://github.com/ClickHouse/ClickHouse/pull/54097) ([Bharat Nallan](https://github.com/bharatnc)).
* Added `peak_threads_usage` to query_log table. [#54335](https://github.com/ClickHouse/ClickHouse/pull/54335) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
* Add `SHOW FUNCTIONS` support to clickhouse-client. [#54337](https://github.com/ClickHouse/ClickHouse/pull/54337) ([Julia Kartseva](https://github.com/wat-ze-hex)).
* Added function `toDaysSinceYearZero` with alias `TO_DAYS` (for compatibility with MySQL) which returns the number of days passed since `0001-01-01` (in Proleptic Gregorian Calendar). [#54479](https://github.com/ClickHouse/ClickHouse/pull/54479) ([Robert Schulze](https://github.com/rschu1ze)). Function `toDaysSinceYearZero()` now supports arguments of type `DateTime` and `DateTime64`. [#54856](https://github.com/ClickHouse/ClickHouse/pull/54856) ([Serge Klochkov](https://github.com/slvrtrn)).
* Added functions `YYYYMMDDtoDate`, `YYYYMMDDtoDate32`, `YYYYMMDDhhmmssToDateTime` and `YYYYMMDDhhmmssToDateTime64`. They convert a date or date with time encoded as integer (e.g. 20230911) into a native date or date with time. As such, they provide the opposite functionality of the existing functions `toYYYYMMDD` and `toYYYYMMDDhhmmss`. [#54509](https://github.com/ClickHouse/ClickHouse/pull/54509) ([Quanfa Fu](https://github.com/dentiscalprum)) ([Robert Schulze](https://github.com/rschu1ze)).
* Add several string distance functions, including `byteHammingDistance`, `editDistance`. [#54935](https://github.com/ClickHouse/ClickHouse/pull/54935) ([flynn](https://github.com/ucasfl)).
* Allow specifying the expiration date and, optionally, the time for user credentials with `VALID UNTIL datetime` clause. [#51261](https://github.com/ClickHouse/ClickHouse/pull/51261) ([Nikolay Degterinsky](https://github.com/evillique)).
* Allow S3-style URLs for table functions `s3`, `gcs`, `oss`. URL is automatically converted to HTTP. Example: `'s3://clickhouse-public-datasets/hits.csv'` is converted to `'https://clickhouse-public-datasets.s3.amazonaws.com/hits.csv'`. [#54931](https://github.com/ClickHouse/ClickHouse/pull/54931) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add new setting `print_pretty_type_names` to print pretty deep nested types like Tuple/Maps/Arrays. [#55095](https://github.com/ClickHouse/ClickHouse/pull/55095) ([Kruglov Pavel](https://github.com/Avogar)).
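A few of the entries above (the `DECIMAL` shortcuts, `toDaysSinceYearZero`/`TO_DAYS`, the `YYYYMMDD*` conversion helpers and `print_pretty_type_names`), sketched as illustrative queries; the results in the comments are approximate:

```sql
SELECT toTypeName(CAST('1.5' AS DECIMAL));        -- Decimal(10, 0), i.e. the DECIMAL shortcut
SELECT toTypeName(CAST('1.5' AS DECIMAL(6)));     -- Decimal(6, 0),  i.e. the DECIMAL(P) shortcut
SELECT toDaysSinceYearZero(toDate('2023-09-28')); -- days since 0001-01-01; TO_DAYS() is the MySQL-compatible alias
SELECT YYYYMMDDtoDate(20230928);                  -- 2023-09-28
SELECT YYYYMMDDhhmmssToDateTime(20230928120000);  -- 2023-09-28 12:00:00
SET print_pretty_type_names = 1;                  -- pretty-print deeply nested types in DESCRIBE output
```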
#### Performance Improvement
* Speed up reading from S3 by enabling prefetches by default. [#53709](https://github.com/ClickHouse/ClickHouse/pull/53709) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Do not implicitly read PK and version columns in lonely parts if unnecessary for queries with FINAL. [#53919](https://github.com/ClickHouse/ClickHouse/pull/53919) ([Duc Canh Le](https://github.com/canhld94)).
* Optimize GROUP BY with constant keys. Will optimize queries with GROUP BY `_file`/`_path` after https://github.com/ClickHouse/ClickHouse/pull/53529; see the example after this list. [#53549](https://github.com/ClickHouse/ClickHouse/pull/53549) ([Kruglov Pavel](https://github.com/Avogar)).
* Improve performance of sorting for `Decimal` columns. Improve performance of insertion into `MergeTree` if ORDER BY contains a `Decimal` column. Improve performance of sorting when data is already sorted or almost sorted. [#35961](https://github.com/ClickHouse/ClickHouse/pull/35961) ([Maksim Kita](https://github.com/kitaisreal)).
* Improve performance for huge query analysis. Fixes [#51224](https://github.com/ClickHouse/ClickHouse/issues/51224). [#51469](https://github.com/ClickHouse/ClickHouse/pull/51469) ([frinkr](https://github.com/frinkr)).
* An optimization to rewrite `COUNT(DISTINCT ...)` and various `uniq` variants to `count` if it is selected from a subquery with GROUP BY. [#52082](https://github.com/ClickHouse/ClickHouse/pull/52082) [#52645](https://github.com/ClickHouse/ClickHouse/pull/52645) ([JackyWoo](https://github.com/JackyWoo)).
* Remove manual calls to `mmap/mremap/munmap` and delegate all this work to `jemalloc` - and it slightly improves performance. [#52792](https://github.com/ClickHouse/ClickHouse/pull/52792) ([Nikita Taranov](https://github.com/nickitat)).
* Fixed high CPU consumption when working with NATS. [#54399](https://github.com/ClickHouse/ClickHouse/pull/54399) ([Vasilev Pyotr](https://github.com/vahpetr)).
* Since we use separate instructions for executing `toString()` with datetime argument, it is possible to improve performance a bit for non-datetime arguments and have some parts of the code cleaner. Follows up [#53680](https://github.com/ClickHouse/ClickHouse/issues/53680). [#54443](https://github.com/ClickHouse/ClickHouse/pull/54443) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Instead of serializing JSON elements into a `std::stringstream`, this PR tries to put the serialization result into `ColumnString` directly. [#54613](https://github.com/ClickHouse/ClickHouse/pull/54613) ([lgbo](https://github.com/lgbo-ustc)).
* Enable ORDER BY optimization for reading data in corresponding order from a MergeTree table in case that the table is behind a view. [#54628](https://github.com/ClickHouse/ClickHouse/pull/54628) ([Vitaly Baranov](https://github.com/vitlibar)).
* Improve JSON SQL functions by reusing `GeneratorJSONPath` and removing several shared pointers. [#54735](https://github.com/ClickHouse/ClickHouse/pull/54735) ([lgbo](https://github.com/lgbo-ustc)).
* Keeper tries to batch flush requests for better performance. [#53049](https://github.com/ClickHouse/ClickHouse/pull/53049) ([Antonio Andelic](https://github.com/antonio2368)).
* Now `clickhouse-client` processes files in parallel in case of `INFILE 'glob_expression'`. Closes [#54218](https://github.com/ClickHouse/ClickHouse/issues/54218). [#54533](https://github.com/ClickHouse/ClickHouse/pull/54533) ([Max K.](https://github.com/mkaynov)).
* Allow to use primary key for IN function where primary key column types are different from `IN` function right side column types. Example: `SELECT id FROM test_table WHERE id IN (SELECT '5')`. Closes [#48936](https://github.com/ClickHouse/ClickHouse/issues/48936). [#54544](https://github.com/ClickHouse/ClickHouse/pull/54544) ([Maksim Kita](https://github.com/kitaisreal)).
* Hash JOIN tries to shrink internal buffers consuming half of maximal available memory (set by `max_bytes_in_join`). [#54584](https://github.com/ClickHouse/ClickHouse/pull/54584) ([vdimir](https://github.com/vdimir)).
* Respect `max_block_size` for array join to avoid possible OOM. Close [#54290](https://github.com/ClickHouse/ClickHouse/issues/54290). [#54664](https://github.com/ClickHouse/ClickHouse/pull/54664) ([李扬](https://github.com/taiyang-li)).
* Reuse HTTP connections in the `s3` table function. [#54812](https://github.com/ClickHouse/ClickHouse/pull/54812) ([Michael Kolupaev](https://github.com/al13n321)).
* Replace the linear search in `MergeTreeRangeReader::Stream::ceilRowsToCompleteGranules` with a binary search. [#54869](https://github.com/ClickHouse/ClickHouse/pull/54869) ([usurai](https://github.com/usurai)).
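Two hypothetical queries of the shapes targeted by the constant-key GROUP BY and primary-key-for-IN items above (the file glob and table name are made up; the second query is taken from the item itself):

```sql
-- GROUP BY on the constant virtual column _file: one group per input file.
SELECT _file, count() FROM file('events_*.csv') GROUP BY _file;

-- The primary key on test_table.id can now be used even though the right-hand
-- side of IN produces a column of a different (String) type.
SELECT id FROM test_table WHERE id IN (SELECT '5');
```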
#### Experimental Feature
* The creation of `Annoy` indexes can now be parallelized using setting `max_threads_for_annoy_index_creation`, as sketched after this list. [#54047](https://github.com/ClickHouse/ClickHouse/pull/54047) ([Robert Schulze](https://github.com/rschu1ze)).
* Parallel replicas over distributed don't read from all replicas [#54199](https://github.com/ClickHouse/ClickHouse/pull/54199) ([Igor Nikonov](https://github.com/devcrafter)).
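A sketch of how the parallel Annoy index build mentioned above could be exercised; the table layout is hypothetical and the feature remains experimental:

```sql
SET allow_experimental_annoy_index = 1;
SET max_threads_for_annoy_index_creation = 4;   -- new in 23.9: build the index with several threads

CREATE TABLE vectors
(
    id  UInt64,
    vec Array(Float32),
    INDEX ann_idx vec TYPE annoy GRANULARITY 1
)
ENGINE = MergeTree
ORDER BY id;
```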
#### Improvement
* Allow to replace long names of files of columns in `MergeTree` data parts to hashes of names. It helps to avoid `File name too long` error in some cases. [#50612](https://github.com/ClickHouse/ClickHouse/pull/50612) ([Anton Popov](https://github.com/CurtizJ)).
* Parse data in `JSON` format as `JSONEachRow` if failed to parse metadata. It will allow to read files with `.json` extension even if real format is JSONEachRow. Closes [#45740](https://github.com/ClickHouse/ClickHouse/issues/45740). [#54405](https://github.com/ClickHouse/ClickHouse/pull/54405) ([Kruglov Pavel](https://github.com/Avogar)).
* Output valid JSON/XML on exception during HTTP query execution. Add setting `http_write_exception_in_output_format` to enable/disable this behaviour (enabled by default). [#52853](https://github.com/ClickHouse/ClickHouse/pull/52853) ([Kruglov Pavel](https://github.com/Avogar)).
* View `information_schema.tables` now has a new field `data_length` which shows the approximate size of the data on disk. Required to run queries generated by Amazon QuickSight. [#55037](https://github.com/ClickHouse/ClickHouse/pull/55037) ([Robert Schulze](https://github.com/rschu1ze)).
* The MySQL interface gained a minimal implementation of prepared statements, just enough to allow a connection from Tableau Online to ClickHouse via the MySQL connector. [#54115](https://github.com/ClickHouse/ClickHouse/pull/54115) ([Serge Klochkov](https://github.com/slvrtrn)). Please note: the prepared statements implementation is pretty minimal, we do not support arguments binding yet, it is not required in this particular Tableau online use case. It will be implemented as a follow-up if necessary after extensive testing of Tableau Online in case we discover issues.
* Support case-insensitive and dot-all matching modes in `regexp_tree` dictionaries. [#50906](https://github.com/ClickHouse/ClickHouse/pull/50906) ([Johann Gan](https://github.com/johanngan)).
* Keeper improvement: Add a `createIfNotExists` Keeper command. [#48855](https://github.com/ClickHouse/ClickHouse/pull/48855) ([Konstantin Bogdanov](https://github.com/thevar1able)).
* More precise integer type inference, fix [#51236](https://github.com/ClickHouse/ClickHouse/issues/51236). [#53003](https://github.com/ClickHouse/ClickHouse/pull/53003) ([Chen768959](https://github.com/Chen768959)).
* Introduced resolving of charsets in the string literals for MaterializedMySQL. [#53220](https://github.com/ClickHouse/ClickHouse/pull/53220) ([Val Doroshchuk](https://github.com/valbok)).
* Fix a subtle issue with a rarely used `EmbeddedRocksDB` table engine in an extremely rare scenario: sometimes the `EmbeddedRocksDB` table engine does not close files correctly in NFS after running `DROP TABLE`. [#53502](https://github.com/ClickHouse/ClickHouse/pull/53502) ([Mingliang Pan](https://github.com/liangliangpan)).
* `RESTORE TABLE ON CLUSTER` must create replicated tables with a matching UUID on hosts. Otherwise the macro `{uuid}` in ZooKeeper path can't work correctly after RESTORE. This PR implements that. [#53765](https://github.com/ClickHouse/ClickHouse/pull/53765) ([Vitaly Baranov](https://github.com/vitlibar)).
* Added restore setting `restore_broken_parts_as_detached`: if it's true the RESTORE process won't stop on broken parts while restoring, instead all the broken parts will be copied to the `detached` folder with the prefix `broken-from-backup`. If it's false the RESTORE process will stop on the first broken part (if any). The default value is false. [#53877](https://github.com/ClickHouse/ClickHouse/pull/53877) ([Vitaly Baranov](https://github.com/vitlibar)).
* Add `elapsed_ns` field to HTTP headers X-ClickHouse-Progress and X-ClickHouse-Summary. [#54179](https://github.com/ClickHouse/ClickHouse/pull/54179) ([joelynch](https://github.com/joelynch)).
* Implementation of `reconfig` (https://github.com/ClickHouse/ClickHouse/pull/49450), `sync`, and `exists` commands for keeper-client. [#54201](https://github.com/ClickHouse/ClickHouse/pull/54201) ([pufit](https://github.com/pufit)).
* `clickhouse-local` and `clickhouse-client` now allow to specify the `--query` parameter multiple times, e.g. `./clickhouse-client --query "SELECT 1" --query "SELECT 2"`. This syntax is slightly more intuitive than `./clickhouse-client --multiquery "SELECT 1; SELECT 2"`, a bit easier to script (e.g. `queries.push_back('--query "$q"')`) and more consistent with the behavior of existing parameter `--queries-file` (e.g. `./clickhouse client --queries-file queries1.sql --queries-file queries2.sql`). [#54249](https://github.com/ClickHouse/ClickHouse/pull/54249) ([Robert Schulze](https://github.com/rschu1ze)).
* Add sub-second precision to `formatReadableTimeDelta`. [#54250](https://github.com/ClickHouse/ClickHouse/pull/54250) ([Andrey Zvonov](https://github.com/zvonand)).
* Enable `allow_remove_stale_moving_parts` by default. [#54260](https://github.com/ClickHouse/ClickHouse/pull/54260) ([vdimir](https://github.com/vdimir)).
* Fix using count from cache and improve progress bar for reading from archives. [#54271](https://github.com/ClickHouse/ClickHouse/pull/54271) ([Kruglov Pavel](https://github.com/Avogar)).
* Add support for S3 credentials using SSO. To define a profile to be used with SSO, set `AWS_PROFILE` environment variable. [#54347](https://github.com/ClickHouse/ClickHouse/pull/54347) ([Antonio Andelic](https://github.com/antonio2368)).
* Support NULL as default for nested types Array/Tuple/Map for input formats. Closes [#51100](https://github.com/ClickHouse/ClickHouse/issues/51100). [#54351](https://github.com/ClickHouse/ClickHouse/pull/54351) ([Kruglov Pavel](https://github.com/Avogar)).
* Allow reading some unusual configuration of chunks from Arrow/Parquet formats. [#54370](https://github.com/ClickHouse/ClickHouse/pull/54370) ([Arthur Passos](https://github.com/arthurpassos)).
* Add `STD` alias to `stddevPop` function for MySQL compatibility. Closes [#54274](https://github.com/ClickHouse/ClickHouse/issues/54274). [#54382](https://github.com/ClickHouse/ClickHouse/pull/54382) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add `addDate` function for compatibility with MySQL and `subDate` for consistency. Reference [#54275](https://github.com/ClickHouse/ClickHouse/issues/54275). [#54400](https://github.com/ClickHouse/ClickHouse/pull/54400) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add `modification_time` into `system.detached_parts`. [#54506](https://github.com/ClickHouse/ClickHouse/pull/54506) ([Azat Khuzhin](https://github.com/azat)).
* Added a setting `splitby_max_substrings_includes_remaining_string` which controls if functions "splitBy*()" with argument "max_substring" > 0 include the remaining string (if any) in the result array (Python/Spark semantics) or not. The default behavior does not change. [#54518](https://github.com/ClickHouse/ClickHouse/pull/54518) ([Robert Schulze](https://github.com/rschu1ze)).
* Better integer types inference for `Int64`/`UInt64` fields. Continuation of [#53003](https://github.com/ClickHouse/ClickHouse/pull/53003). Now it works also for nested types like Arrays of Arrays and for functions like `map/tuple`. Issue: [#51236](https://github.com/ClickHouse/ClickHouse/issues/51236). [#54553](https://github.com/ClickHouse/ClickHouse/pull/54553) ([Kruglov Pavel](https://github.com/Avogar)).
* Added array operations for multiplying, dividing and modulo on scalar. Works in each way, for example `5 * [5, 5]` and `[5, 5] * 5` - both cases are possible. [#54608](https://github.com/ClickHouse/ClickHouse/pull/54608) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add optional `version` argument to `rm` command in `keeper-client` to support safer deletes. [#54708](https://github.com/ClickHouse/ClickHouse/pull/54708) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Disable killing the server by systemd (that may lead to data loss when using Buffer tables). [#54744](https://github.com/ClickHouse/ClickHouse/pull/54744) ([Azat Khuzhin](https://github.com/azat)).
* Added field `is_deterministic` to system table `system.functions` which indicates whether the result of a function is stable between two invocations (given exactly the same inputs) or not. [#54766](https://github.com/ClickHouse/ClickHouse/pull/54766) [#55035](https://github.com/ClickHouse/ClickHouse/pull/55035) ([Robert Schulze](https://github.com/rschu1ze)).
* Made the views in schema `information_schema` more compatible with the equivalent views in MySQL (i.e. modified and extended them) up to a point where Tableau Online is able to connect to ClickHouse. More specifically: 1. The type of field `information_schema.tables.table_type` changed from Enum8 to String. 2. Added fields `table_comment` and `table_collation` to view `information_schema.table`. 3. Added views `information_schema.key_column_usage` and `referential_constraints`. 4. Replaced uppercase aliases in `information_schema` views with concrete uppercase columns. [#54773](https://github.com/ClickHouse/ClickHouse/pull/54773) ([Serge Klochkov](https://github.com/slvrtrn)).
* The query cache now returns an error if the user tries to cache the result of a query with a non-deterministic function such as `now`, `randomString` and `dictGet`. Compared to the previous behavior (silently don't cache the result), this reduces confusion and surprise for users. [#54801](https://github.com/ClickHouse/ClickHouse/pull/54801) ([Robert Schulze](https://github.com/rschu1ze)).
* Forbid special columns like materialized/ephemeral/alias for `file`/`s3`/`url`/... storages, fix insert into ephemeral columns from files. Closes [#53477](https://github.com/ClickHouse/ClickHouse/issues/53477). [#54803](https://github.com/ClickHouse/ClickHouse/pull/54803) ([Kruglov Pavel](https://github.com/Avogar)).
* More configurable collecting metadata for backup. [#54804](https://github.com/ClickHouse/ClickHouse/pull/54804) ([Vitaly Baranov](https://github.com/vitlibar)).
* `clickhouse-local`'s log file (if enabled with --server_logs_file flag) will now prefix each line with timestamp, thread id, etc, just like `clickhouse-server`. [#54807](https://github.com/ClickHouse/ClickHouse/pull/54807) ([Michael Kolupaev](https://github.com/al13n321)).
* Field `is_obsolete` in the `system.merge_tree_settings` table - it is now 1 for obsolete merge tree settings. Previously, only the description indicated that the setting is obsolete. [#54837](https://github.com/ClickHouse/ClickHouse/pull/54837) ([Robert Schulze](https://github.com/rschu1ze)).
* Make it possible to use plural when using interval literals. `INTERVAL 2 HOURS` should be equivalent to `INTERVAL 2 HOUR`. [#54860](https://github.com/ClickHouse/ClickHouse/pull/54860) ([Jordi Villar](https://github.com/jrdi)).
* Always allow the creation of a projection with `Nullable` PK. This fixes [#54814](https://github.com/ClickHouse/ClickHouse/issues/54814). [#54895](https://github.com/ClickHouse/ClickHouse/pull/54895) ([Amos Bird](https://github.com/amosbird)).
* Retry backup's S3 operations after connection reset failure. [#54900](https://github.com/ClickHouse/ClickHouse/pull/54900) ([Vitaly Baranov](https://github.com/vitlibar)).
* Make the exception message exact in case of the maximum value of a settings is less than the minimum value. [#54925](https://github.com/ClickHouse/ClickHouse/pull/54925) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* `LIKE`, `match`, and other regular expressions matching functions now allow matching with patterns containing non-UTF-8 substrings by falling back to binary matching. Example: you can use `string LIKE '\xFE\xFF%'` to detect BOM. This closes [#54486](https://github.com/ClickHouse/ClickHouse/issues/54486). [#54942](https://github.com/ClickHouse/ClickHouse/pull/54942) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Added `ContextLockWaitMicroseconds` profile event. [#55029](https://github.com/ClickHouse/ClickHouse/pull/55029) ([Maksim Kita](https://github.com/kitaisreal)).
* The Keeper dynamically adjusts log levels. [#50372](https://github.com/ClickHouse/ClickHouse/pull/50372) ([helifu](https://github.com/helifu)).
* Added function `timestamp` for compatibility with MySQL. Closes [#54275](https://github.com/ClickHouse/ClickHouse/issues/54275). [#54639](https://github.com/ClickHouse/ClickHouse/pull/54639) ([Nikolay Degterinsky](https://github.com/evillique)).
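Sketches of a few of the smaller SQL-level improvements listed above (plural interval units, array/scalar arithmetic, the MySQL-compatibility functions and the new `is_deterministic` field); the commented results are indicative only:

```sql
SELECT now() + INTERVAL 2 HOURS;                       -- plural units, equivalent to INTERVAL 2 HOUR
SELECT [5, 5] * 5, 5 * [5, 5];                         -- array/scalar arithmetic works in either order
SELECT addDate(toDate('2023-09-01'), INTERVAL 5 DAY);  -- 2023-09-06
SELECT STD(number) FROM numbers(10);                   -- STD as an alias of stddevPop
SELECT timestamp('2023-09-28 10:00:00');               -- MySQL-compatible timestamp()
SELECT name, is_deterministic FROM system.functions WHERE name IN ('rand', 'abs');
```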
#### Build/Testing/Packaging Improvement
* Bumped the compiler of official and continuous integration builds of ClickHouse from Clang 16 to 17. [#53831](https://github.com/ClickHouse/ClickHouse/pull/53831) ([Robert Schulze](https://github.com/rschu1ze)).
* Regenerated tld data for lookups (`tldLookup.generated.cpp`). [#54269](https://github.com/ClickHouse/ClickHouse/pull/54269) ([Bharat Nallan](https://github.com/bharatnc)).
* Remove the redundant `clickhouse-keeper-client` symlink. [#54587](https://github.com/ClickHouse/ClickHouse/pull/54587) ([Tomas Barton](https://github.com/deric)).
* Use `/usr/bin/env` to resolve bash - now it supports Nix OS. [#54603](https://github.com/ClickHouse/ClickHouse/pull/54603) ([Fionera](https://github.com/fionera)).
* CMake added `PROFILE_CPU` option needed to perform `perf record` without using a DWARF call graph. [#54917](https://github.com/ClickHouse/ClickHouse/pull/54917) ([Maksim Kita](https://github.com/kitaisreal)).
* If the linker is different than LLD, stop with a fatal error. [#55036](https://github.com/ClickHouse/ClickHouse/pull/55036) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Replaced the library to handle (encode/decode) base64 values from Turbo-Base64 to aklomp-base64. Both are SIMD-accelerated on x86 and ARM but 1. the license of the latter (BSD-2) is more favorable for ClickHouse, Turbo64 switched in the meantime to GPL-3, 2. with more GitHub stars, aklomp-base64 seems more future-proof, 3. aklomp-base64 has a slightly nicer API (which is arguably subjective), and 4. aklomp-base64 does not require us to hack around bugs (like non-threadsafe initialization). Note: aklomp-base64 rejects unpadded base64 values whereas Turbo-Base64 decodes them on a best-effort basis. RFC-4648 leaves it open whether padding is mandatory or not, but depending on the context this may be a behavioral change to be aware of. [#54119](https://github.com/ClickHouse/ClickHouse/pull/54119) ([Mikhail Koviazin](https://github.com/mkmkme)).
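The padding note in the last item can be seen directly from SQL; with the new library an unpadded value is rejected (a sketch, error details omitted):

```sql
SELECT base64Decode('SGVsbG8=');    -- 'Hello' (padded input decodes as before)
SELECT base64Decode('SGVsbG8');     -- unpadded input now throws an exception
SELECT tryBase64Decode('SGVsbG8');  -- returns an empty string instead of throwing
```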
#### Bug Fix (user-visible misbehavior in an official stable release)
* Fix REPLACE/MOVE PARTITION with zero-copy replication (note: "zero-copy replication" is an experimental feature) [#54193](https://github.com/ClickHouse/ClickHouse/pull/54193) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix zero copy locks with hardlinks (note: "zero-copy replication" is an experimental feature) [#54859](https://github.com/ClickHouse/ClickHouse/pull/54859) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix zero copy garbage (note: "zero-copy replication" is an experimental feature) [#54550](https://github.com/ClickHouse/ClickHouse/pull/54550) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Pass HTTP retry timeout as milliseconds (it was incorrect before). [#54438](https://github.com/ClickHouse/ClickHouse/pull/54438) ([Duc Canh Le](https://github.com/canhld94)).
* Fix misleading error message in OUTFILE with `CapnProto`/`Protobuf` [#52870](https://github.com/ClickHouse/ClickHouse/pull/52870) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix summary reporting with parallel replicas with LIMIT [#53050](https://github.com/ClickHouse/ClickHouse/pull/53050) ([Raúl Marín](https://github.com/Algunenano)).
* Fix throttling of BACKUPs from/to S3 (in case native copy was not used) and in some other places as well [#53336](https://github.com/ClickHouse/ClickHouse/pull/53336) ([Azat Khuzhin](https://github.com/azat)).
* Fix IO throttling during copying whole directories [#53338](https://github.com/ClickHouse/ClickHouse/pull/53338) ([Azat Khuzhin](https://github.com/azat)).
* Fix: moved to prewhere condition actions can lose column [#53492](https://github.com/ClickHouse/ClickHouse/pull/53492) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fixed internal error when replacing with byte-equal parts [#53735](https://github.com/ClickHouse/ClickHouse/pull/53735) ([Pedro Riera](https://github.com/priera)).
* Fix: require columns participating in interpolate expression [#53754](https://github.com/ClickHouse/ClickHouse/pull/53754) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix cluster discovery initialization + setting up fail points in config [#54113](https://github.com/ClickHouse/ClickHouse/pull/54113) ([vdimir](https://github.com/vdimir)).
* Fix issues in `accurateCastOrNull` [#54136](https://github.com/ClickHouse/ClickHouse/pull/54136) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Fix nullable primary key with the FINAL modifier [#54164](https://github.com/ClickHouse/ClickHouse/pull/54164) ([Amos Bird](https://github.com/amosbird)).
* Fixed error that prevented insertion in replicated materialized view of new data in presence of duplicated data. [#54184](https://github.com/ClickHouse/ClickHouse/pull/54184) ([Pedro Riera](https://github.com/priera)).
* Fix: allow `IPv6` for bloom filter [#54200](https://github.com/ClickHouse/ClickHouse/pull/54200) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* fix possible type mismatch with `IPv4` [#54212](https://github.com/ClickHouse/ClickHouse/pull/54212) ([Bharat Nallan](https://github.com/bharatnc)).
* Fix `system.data_skipping_indices` for recreated indices [#54225](https://github.com/ClickHouse/ClickHouse/pull/54225) ([Artur Malchanau](https://github.com/Hexta)).
* fix name clash for multiple join rewriter v2 [#54240](https://github.com/ClickHouse/ClickHouse/pull/54240) ([Tao Wang](https://github.com/wangtZJU)).
* Fix unexpected errors in `system.errors` after join [#54306](https://github.com/ClickHouse/ClickHouse/pull/54306) ([vdimir](https://github.com/vdimir)).
* Fix `isZeroOrNull(NULL)` [#54316](https://github.com/ClickHouse/ClickHouse/pull/54316) ([flynn](https://github.com/ucasfl)).
* Fix: parallel replicas over distributed with `prefer_localhost_replica` = 1 [#54334](https://github.com/ClickHouse/ClickHouse/pull/54334) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix logical error in vertical merge + replacing merge tree + optimize cleanup [#54368](https://github.com/ClickHouse/ClickHouse/pull/54368) ([alesapin](https://github.com/alesapin)).
* Fix possible error `URI contains invalid characters` in the `s3` table function [#54373](https://github.com/ClickHouse/ClickHouse/pull/54373) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix segfault in AST optimization of `arrayExists` function [#54379](https://github.com/ClickHouse/ClickHouse/pull/54379) ([Nikolay Degterinsky](https://github.com/evillique)).
* Check for overflow before addition in `analysisOfVariance` function [#54385](https://github.com/ClickHouse/ClickHouse/pull/54385) ([Antonio Andelic](https://github.com/antonio2368)).
* Reproduce and fix the bug in removeSharedRecursive [#54430](https://github.com/ClickHouse/ClickHouse/pull/54430) ([Sema Checherinda](https://github.com/CheSema)).
* Fix possible incorrect result with SimpleAggregateFunction in PREWHERE and FINAL [#54436](https://github.com/ClickHouse/ClickHouse/pull/54436) ([Azat Khuzhin](https://github.com/azat)).
* Fix filtering parts with indexHint for non analyzer [#54449](https://github.com/ClickHouse/ClickHouse/pull/54449) ([Azat Khuzhin](https://github.com/azat)).
* Fix aggregate projections with normalized states [#54480](https://github.com/ClickHouse/ClickHouse/pull/54480) ([Amos Bird](https://github.com/amosbird)).
* `clickhouse-local`: something for multiquery parameter [#54498](https://github.com/ClickHouse/ClickHouse/pull/54498) ([CuiShuoGuo](https://github.com/bakam412)).
* `clickhouse-local` supports `--database` command line argument [#54503](https://github.com/ClickHouse/ClickHouse/pull/54503) ([vdimir](https://github.com/vdimir)).
* Fix possible parsing error in `-WithNames` formats with disabled `input_format_with_names_use_header` [#54513](https://github.com/ClickHouse/ClickHouse/pull/54513) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix rare case of CHECKSUM_DOESNT_MATCH error [#54549](https://github.com/ClickHouse/ClickHouse/pull/54549) ([alesapin](https://github.com/alesapin)).
* Fix sorting of UNION ALL of already sorted results [#54564](https://github.com/ClickHouse/ClickHouse/pull/54564) ([Vitaly Baranov](https://github.com/vitlibar)).
* Fix snapshot install in Keeper [#54572](https://github.com/ClickHouse/ClickHouse/pull/54572) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix race in `ColumnUnique` [#54575](https://github.com/ClickHouse/ClickHouse/pull/54575) ([Nikita Taranov](https://github.com/nickitat)).
* Annoy/Usearch index: Fix LOGICAL_ERROR during build-up with default values [#54600](https://github.com/ClickHouse/ClickHouse/pull/54600) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix serialization of `ColumnDecimal` [#54601](https://github.com/ClickHouse/ClickHouse/pull/54601) ([Nikita Taranov](https://github.com/nickitat)).
* Fix schema inference for *Cluster functions for column names with spaces [#54635](https://github.com/ClickHouse/ClickHouse/pull/54635) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix using structure from insertion tables in case of defaults and explicit insert columns [#54655](https://github.com/ClickHouse/ClickHouse/pull/54655) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix: avoid using regex match, possibly containing alternation, as a key condition. [#54696](https://github.com/ClickHouse/ClickHouse/pull/54696) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix ReplacingMergeTree with vertical merge and cleanup [#54706](https://github.com/ClickHouse/ClickHouse/pull/54706) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Fix virtual columns having incorrect values after ORDER BY [#54811](https://github.com/ClickHouse/ClickHouse/pull/54811) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix filtering parts with indexHint for non analyzer [#54825](https://github.com/ClickHouse/ClickHouse/pull/54825) [#54449](https://github.com/ClickHouse/ClickHouse/pull/54449) ([Azat Khuzhin](https://github.com/azat)).
* Fix Keeper segfault during shutdown [#54841](https://github.com/ClickHouse/ClickHouse/pull/54841) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix `Invalid number of rows in Chunk` in MaterializedPostgreSQL [#54844](https://github.com/ClickHouse/ClickHouse/pull/54844) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Move obsolete format settings to separate section [#54855](https://github.com/ClickHouse/ClickHouse/pull/54855) ([Kruglov Pavel](https://github.com/Avogar)).
* Rebuild `minmax_count_projection` when partition key gets modified [#54943](https://github.com/ClickHouse/ClickHouse/pull/54943) ([Amos Bird](https://github.com/amosbird)).
* Fix bad cast to `ColumnVector<Int128>` in function `if` [#55019](https://github.com/ClickHouse/ClickHouse/pull/55019) ([Kruglov Pavel](https://github.com/Avogar)).
* Prevent attaching parts from tables with different projections or indices [#55062](https://github.com/ClickHouse/ClickHouse/pull/55062) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Store NULL in scalar result map for empty subquery result [#52240](https://github.com/ClickHouse/ClickHouse/pull/52240) ([vdimir](https://github.com/vdimir)).
* Fix `FINAL` produces invalid read ranges in a rare case [#54934](https://github.com/ClickHouse/ClickHouse/pull/54934) ([Nikita Taranov](https://github.com/nickitat)).
* Fix: insert quorum w/o keeper retries [#55026](https://github.com/ClickHouse/ClickHouse/pull/55026) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix simple state with nullable [#55030](https://github.com/ClickHouse/ClickHouse/pull/55030) ([Pedro Riera](https://github.com/priera)).

### <a id="238"></a> ClickHouse release 23.8 LTS, 2023-08-31

#### Backward Incompatible Change
@@ -25,13 +193,13 @@
* Add column `ptr` to `system.trace_log` for `trace_type = 'MemorySample'`. This column contains an address of allocation. Added function `flameGraph` which can build flamegraph containing allocated and not released memory. Reworking of [#38391](https://github.com/ClickHouse/ClickHouse/issues/38391). [#45322](https://github.com/ClickHouse/ClickHouse/pull/45322) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
* Added table function `azureBlobStorageCluster`. The supported set of features is very similar to table function `s3Cluster`. [#50795](https://github.com/ClickHouse/ClickHouse/pull/50795) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
|
* Added table function `azureBlobStorageCluster`. The supported set of features is very similar to table function `s3Cluster`. [#50795](https://github.com/ClickHouse/ClickHouse/pull/50795) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
|
||||||
* Allow using `cluster`, `clusterAllReplicas`, `remote`, and `remoteSecure` without table name in issue [#50808](https://github.com/ClickHouse/ClickHouse/issues/50808). [#50848](https://github.com/ClickHouse/ClickHouse/pull/50848) ([Yangkuan Liu](https://github.com/LiuYangkuan)).
|
* Allow using `cluster`, `clusterAllReplicas`, `remote`, and `remoteSecure` without table name in issue [#50808](https://github.com/ClickHouse/ClickHouse/issues/50808). [#50848](https://github.com/ClickHouse/ClickHouse/pull/50848) ([Yangkuan Liu](https://github.com/LiuYangkuan)).
|
||||||
* A system table to monitor kafka consumers. [#50999](https://github.com/ClickHouse/ClickHouse/pull/50999) ([Ilya Golshtein](https://github.com/ilejn)).
|
* A system table to monitor Kafka consumers. [#50999](https://github.com/ClickHouse/ClickHouse/pull/50999) ([Ilya Golshtein](https://github.com/ilejn)).
|
||||||
* Added `max_sessions_for_user` setting. [#51724](https://github.com/ClickHouse/ClickHouse/pull/51724) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
|
* Added `max_sessions_for_user` setting. [#51724](https://github.com/ClickHouse/ClickHouse/pull/51724) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
|
||||||
* New functions `toUTCTimestamp/fromUTCTimestamp` to act same as spark's `to_utc_timestamp/from_utc_timestamp`. [#52117](https://github.com/ClickHouse/ClickHouse/pull/52117) ([KevinyhZou](https://github.com/KevinyhZou)).
|
* New functions `toUTCTimestamp/fromUTCTimestamp` to act same as spark's `to_utc_timestamp/from_utc_timestamp`. [#52117](https://github.com/ClickHouse/ClickHouse/pull/52117) ([KevinyhZou](https://github.com/KevinyhZou)).
|
||||||
* Add new functions `structureToCapnProtoSchema`/`structureToProtobufSchema` that convert ClickHouse table structure to CapnProto/Protobuf format schema. Allow to input/output data in CapnProto/Protobuf format without external format schema using autogenerated schema from table structure (controled by settings `format_capn_proto_use_autogenerated_schema`/`format_protobuf_use_autogenerated_schema`). Allow to export autogenerated schema while input/outoput using setting `output_format_schema`. [#52278](https://github.com/ClickHouse/ClickHouse/pull/52278) ([Kruglov Pavel](https://github.com/Avogar)).
|
* Add new functions `structureToCapnProtoSchema`/`structureToProtobufSchema` that convert ClickHouse table structure to CapnProto/Protobuf format schema. Allow to input/output data in CapnProto/Protobuf format without external format schema using autogenerated schema from table structure (controlled by settings `format_capn_proto_use_autogenerated_schema`/`format_protobuf_use_autogenerated_schema`). Allow to export autogenerated schema while input/output using setting `output_format_schema`. [#52278](https://github.com/ClickHouse/ClickHouse/pull/52278) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
* A new field `query_cache_usage` in `system.query_log` now shows if and how the query cache was used. [#52384](https://github.com/ClickHouse/ClickHouse/pull/52384) ([Robert Schulze](https://github.com/rschu1ze)).
|
* A new field `query_cache_usage` in `system.query_log` now shows if and how the query cache was used. [#52384](https://github.com/ClickHouse/ClickHouse/pull/52384) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
* Add new function `startsWithUTF8` and `endsWithUTF8`. [#52555](https://github.com/ClickHouse/ClickHouse/pull/52555) ([李扬](https://github.com/taiyang-li)).
|
* Add new function `startsWithUTF8` and `endsWithUTF8`. [#52555](https://github.com/ClickHouse/ClickHouse/pull/52555) ([李扬](https://github.com/taiyang-li)).
|
||||||
* Allow variable number of columns in TSV/CuatomSeprarated/JSONCompactEachRow, make schema inference work with variable number of columns. Add settings `input_format_tsv_allow_variable_number_of_columns`, `input_format_custom_allow_variable_number_of_columns`, `input_format_json_compact_allow_variable_number_of_columns`. [#52692](https://github.com/ClickHouse/ClickHouse/pull/52692) ([Kruglov Pavel](https://github.com/Avogar)).
|
* Allow variable number of columns in TSV/CustomSeparated/JSONCompactEachRow, make schema inference work with variable number of columns. Add settings `input_format_tsv_allow_variable_number_of_columns`, `input_format_custom_allow_variable_number_of_columns`, `input_format_json_compact_allow_variable_number_of_columns`. [#52692](https://github.com/ClickHouse/ClickHouse/pull/52692) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
* Added `SYSTEM STOP/START PULLING REPLICATION LOG` queries (for testing `ReplicatedMergeTree`). [#52881](https://github.com/ClickHouse/ClickHouse/pull/52881) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
* Added `SYSTEM STOP/START PULLING REPLICATION LOG` queries (for testing `ReplicatedMergeTree`). [#52881](https://github.com/ClickHouse/ClickHouse/pull/52881) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
* Allow to execute constant non-deterministic functions in mutations on initiator. [#53129](https://github.com/ClickHouse/ClickHouse/pull/53129) ([Anton Popov](https://github.com/CurtizJ)).
|
* Allow to execute constant non-deterministic functions in mutations on initiator. [#53129](https://github.com/ClickHouse/ClickHouse/pull/53129) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
* Add input format `One` that doesn't read any data and always returns single row with column `dummy` with type `UInt8` and value `0` like `system.one`. It can be used together with `_file/_path` virtual columns to list files in file/s3/url/hdfs/etc table functions without reading any data. [#53209](https://github.com/ClickHouse/ClickHouse/pull/53209) ([Kruglov Pavel](https://github.com/Avogar)).
|
* Add input format `One` that doesn't read any data and always returns single row with column `dummy` with type `UInt8` and value `0` like `system.one`. It can be used together with `_file/_path` virtual columns to list files in file/s3/url/hdfs/etc table functions without reading any data. [#53209](https://github.com/ClickHouse/ClickHouse/pull/53209) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
@@ -55,7 +223,7 @@
* Parquet filter pushdown. I.e. when reading Parquet files, row groups (chunks of the file) are skipped based on the WHERE condition and the min/max values in each column. In particular, if the file is roughly sorted by some column, queries that filter by a short range of that column will be much faster. [#52951](https://github.com/ClickHouse/ClickHouse/pull/52951) ([Michael Kolupaev](https://github.com/al13n321)).
* Optimize reading small row groups by batching them together in Parquet. Closes [#53069](https://github.com/ClickHouse/ClickHouse/issues/53069). [#53281](https://github.com/ClickHouse/ClickHouse/pull/53281) ([Kruglov Pavel](https://github.com/Avogar)).
* Optimize count from files in most input formats. Closes [#44334](https://github.com/ClickHouse/ClickHouse/issues/44334). [#53637](https://github.com/ClickHouse/ClickHouse/pull/53637) ([Kruglov Pavel](https://github.com/Avogar)).
- * Use filter by file/path before reading in `url`/`file`/`hdfs` table functins. [#53529](https://github.com/ClickHouse/ClickHouse/pull/53529) ([Kruglov Pavel](https://github.com/Avogar)).
+ * Use filter by file/path before reading in `url`/`file`/`hdfs` table functions. [#53529](https://github.com/ClickHouse/ClickHouse/pull/53529) ([Kruglov Pavel](https://github.com/Avogar)).
* Enable JIT compilation for AArch64, PowerPC, SystemZ, RISC-V. [#38217](https://github.com/ClickHouse/ClickHouse/pull/38217) ([Maksim Kita](https://github.com/kitaisreal)).
* Add setting `rewrite_count_distinct_if_with_count_distinct_implementation` to rewrite `countDistinctIf` with `count_distinct_implementation`. Closes [#30642](https://github.com/ClickHouse/ClickHouse/issues/30642). [#46051](https://github.com/ClickHouse/ClickHouse/pull/46051) ([flynn](https://github.com/ucasfl)).
* Speed up merging of states of `uniq` and `uniqExact` aggregate functions by parallelizing conversion before merge. [#50748](https://github.com/ClickHouse/ClickHouse/pull/50748) ([Jiebin Sun](https://github.com/jiebinn)).
@@ -535,7 +703,7 @@
* Add MemoryTracker for the background tasks (merges and mutation). Introduces `merges_mutations_memory_usage_soft_limit` and `merges_mutations_memory_usage_to_ram_ratio` settings that represent the soft memory limit for merges and mutations. If this limit is reached ClickHouse won't schedule new merge or mutation tasks. Also `MergesMutationsMemoryTracking` metric is introduced to allow observing current memory usage of background tasks. Resubmit [#46089](https://github.com/ClickHouse/ClickHouse/issues/46089). Closes [#48774](https://github.com/ClickHouse/ClickHouse/issues/48774). [#48787](https://github.com/ClickHouse/ClickHouse/pull/48787) ([Dmitry Novik](https://github.com/novikd)).
* Function `dotProduct` work for array. [#49050](https://github.com/ClickHouse/ClickHouse/pull/49050) ([FFFFFFFHHHHHHH](https://github.com/FFFFFFFHHHHHHH)).
* Support statement `SHOW INDEX` to improve compatibility with MySQL. [#49158](https://github.com/ClickHouse/ClickHouse/pull/49158) ([Robert Schulze](https://github.com/rschu1ze)).
- * Add virtual column `_file` and `_path` support to table function `url`. - Impove error message for table function `url`. - resolves [#49231](https://github.com/ClickHouse/ClickHouse/issues/49231) - resolves [#49232](https://github.com/ClickHouse/ClickHouse/issues/49232). [#49356](https://github.com/ClickHouse/ClickHouse/pull/49356) ([Ziyi Tan](https://github.com/Ziy1-Tan)).
+ * Add virtual column `_file` and `_path` support to table function `url`. - Improve error message for table function `url`. - resolves [#49231](https://github.com/ClickHouse/ClickHouse/issues/49231) - resolves [#49232](https://github.com/ClickHouse/ClickHouse/issues/49232). [#49356](https://github.com/ClickHouse/ClickHouse/pull/49356) ([Ziyi Tan](https://github.com/Ziy1-Tan)).
* Adding the `grants` field in the users.xml file, which allows specifying grants for users. [#49381](https://github.com/ClickHouse/ClickHouse/pull/49381) ([pufit](https://github.com/pufit)).
* Support full/right join by using grace hash join algorithm. [#49483](https://github.com/ClickHouse/ClickHouse/pull/49483) ([lgbo](https://github.com/lgbo-ustc)).
* `WITH FILL` modifier groups filling by sorting prefix. Controlled by `use_with_fill_by_sorting_prefix` setting (enabled by default). Related to [#33203](https://github.com/ClickHouse/ClickHouse/issues/33203)#issuecomment-1418736794. [#49503](https://github.com/ClickHouse/ClickHouse/pull/49503) ([Igor Nikonov](https://github.com/devcrafter)).
@@ -580,7 +748,7 @@
* `DEFLATE_QPL` codec lower the minimum simd version to SSE 4.2. [doc change in qpl](https://github.com/intel/qpl/commit/3f8f5cea27739f5261e8fd577dc233ffe88bf679) - Intel® QPL relies on a run-time kernels dispatcher and cpuid check to choose the best available implementation(sse/avx2/avx512) - restructured cmakefile for qpl build in clickhouse to align with latest upstream qpl. [#49811](https://github.com/ClickHouse/ClickHouse/pull/49811) ([jasperzhu](https://github.com/jinjunzh)).
* Add initial support to do JOINs with pure parallel replicas. [#49544](https://github.com/ClickHouse/ClickHouse/pull/49544) ([Raúl Marín](https://github.com/Algunenano)).
* More parallelism on `Outdated` parts removal with "zero-copy replication". [#49630](https://github.com/ClickHouse/ClickHouse/pull/49630) ([Alexander Tokmakov](https://github.com/tavplubix)).
- * Parallel Replicas: 1) Fixed an error `NOT_FOUND_COLUMN_IN_BLOCK` in case of using parallel replicas with non-replicated storage with disabled setting `parallel_replicas_for_non_replicated_merge_tree` 2) Now `allow_experimental_parallel_reading_from_replicas` have 3 possible values - 0, 1 and 2. 0 - disabled, 1 - enabled, silently disable them in case of failure (in case of FINAL or JOIN), 2 - enabled, throw an expection in case of failure. 3) If FINAL modifier is used in SELECT query and parallel replicas are enabled, ClickHouse will try to disable them if `allow_experimental_parallel_reading_from_replicas` is set to 1 and throw an exception otherwise. [#50195](https://github.com/ClickHouse/ClickHouse/pull/50195) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+ * Parallel Replicas: 1) Fixed an error `NOT_FOUND_COLUMN_IN_BLOCK` in case of using parallel replicas with non-replicated storage with disabled setting `parallel_replicas_for_non_replicated_merge_tree` 2) Now `allow_experimental_parallel_reading_from_replicas` have 3 possible values - 0, 1 and 2. 0 - disabled, 1 - enabled, silently disable them in case of failure (in case of FINAL or JOIN), 2 - enabled, throw an exception in case of failure. 3) If FINAL modifier is used in SELECT query and parallel replicas are enabled, ClickHouse will try to disable them if `allow_experimental_parallel_reading_from_replicas` is set to 1 and throw an exception otherwise. [#50195](https://github.com/ClickHouse/ClickHouse/pull/50195) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* When parallel replicas are enabled they will always skip unavailable servers (the behavior is controlled by the setting `skip_unavailable_shards`, enabled by default and can be only disabled). This closes: [#48565](https://github.com/ClickHouse/ClickHouse/issues/48565). [#50293](https://github.com/ClickHouse/ClickHouse/pull/50293) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).

#### Improvement
@@ -599,7 +767,7 @@
* Add a new column `zookeeper_name` in system.replicas, to indicate on which (auxiliary) zookeeper cluster the replicated table's metadata is stored. [#48549](https://github.com/ClickHouse/ClickHouse/pull/48549) ([cangyin](https://github.com/cangyin)).
* `IN` operator support the comparison of `Date` and `Date32`. Closes [#48736](https://github.com/ClickHouse/ClickHouse/issues/48736). [#48806](https://github.com/ClickHouse/ClickHouse/pull/48806) ([flynn](https://github.com/ucasfl)).
* Support for erasure codes in `HDFS`, author: @M1eyu2018, @tomscut. [#48833](https://github.com/ClickHouse/ClickHouse/pull/48833) ([M1eyu](https://github.com/M1eyu2018)).
- * Implement SYSTEM DROP REPLICA from auxillary ZooKeeper clusters, may be close [#48931](https://github.com/ClickHouse/ClickHouse/issues/48931). [#48932](https://github.com/ClickHouse/ClickHouse/pull/48932) ([wangxiaobo](https://github.com/wzb5212)).
+ * Implement SYSTEM DROP REPLICA from auxiliary ZooKeeper clusters, may be close [#48931](https://github.com/ClickHouse/ClickHouse/issues/48931). [#48932](https://github.com/ClickHouse/ClickHouse/pull/48932) ([wangxiaobo](https://github.com/wzb5212)).
* Add Array data type to MongoDB. Closes [#48598](https://github.com/ClickHouse/ClickHouse/issues/48598). [#48983](https://github.com/ClickHouse/ClickHouse/pull/48983) ([Nikolay Degterinsky](https://github.com/evillique)).
* Support storing `Interval` data types in tables. [#49085](https://github.com/ClickHouse/ClickHouse/pull/49085) ([larryluogit](https://github.com/larryluogit)).
* Allow using `ntile` window function without explicit window frame definition: `ntile(3) OVER (ORDER BY a)`, close [#46763](https://github.com/ClickHouse/ClickHouse/issues/46763). [#49093](https://github.com/ClickHouse/ClickHouse/pull/49093) ([vdimir](https://github.com/vdimir)).
@@ -655,7 +823,7 @@

#### Build/Testing/Packaging Improvement
* New and improved `keeper-bench`. Everything can be customized from YAML/XML file: - request generator - each type of request generator can have a specific set of fields - multi requests can be generated just by doing the same under `multi` key - for each request or subrequest in multi a `weight` field can be defined to control distribution - define trees that need to be setup for a test run - hosts can be defined with all timeouts customizable and it's possible to control how many sessions to generate for each host - integers defined with `min_value` and `max_value` fields are random number generators. [#48547](https://github.com/ClickHouse/ClickHouse/pull/48547) ([Antonio Andelic](https://github.com/antonio2368)).
- * Io_uring is not supported on macos, don't choose it when running tests on local to avoid occassional failures. [#49250](https://github.com/ClickHouse/ClickHouse/pull/49250) ([Frank Chen](https://github.com/FrankChen021)).
+ * Io_uring is not supported on macos, don't choose it when running tests on local to avoid occasional failures. [#49250](https://github.com/ClickHouse/ClickHouse/pull/49250) ([Frank Chen](https://github.com/FrankChen021)).
* Support named fault injection for testing. [#49361](https://github.com/ClickHouse/ClickHouse/pull/49361) ([Han Fei](https://github.com/hanfei1991)).
* Allow running ClickHouse in the OS where the `prctl` (process control) syscall is not available, such as AWS Lambda. [#49538](https://github.com/ClickHouse/ClickHouse/pull/49538) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fixed the issue of build conflict between contrib/isa-l and isa-l in qpl [49296](https://github.com/ClickHouse/ClickHouse/issues/49296). [#49584](https://github.com/ClickHouse/ClickHouse/pull/49584) ([jasperzhu](https://github.com/jinjunzh)).
@@ -1359,7 +1527,7 @@ Add settings input_format_tsv/csv/custom_detect_header that enable this behaviou
* Use already written part of the query for fuzzy search (pass to the `skim` library, which is written in Rust and linked statically to ClickHouse). [#44600](https://github.com/ClickHouse/ClickHouse/pull/44600) ([Azat Khuzhin](https://github.com/azat)).
* Enable `input_format_json_read_objects_as_strings` by default to be able to read nested JSON objects while JSON Object type is experimental. [#44657](https://github.com/ClickHouse/ClickHouse/pull/44657) ([Kruglov Pavel](https://github.com/Avogar)).
* Improvement for deduplication of async inserts: when users do duplicate async inserts, we should deduplicate inside the memory before we query Keeper. [#44682](https://github.com/ClickHouse/ClickHouse/pull/44682) ([Han Fei](https://github.com/hanfei1991)).
- * Input/ouptut `Avro` format will parse bool type as ClickHouse bool type. [#44684](https://github.com/ClickHouse/ClickHouse/pull/44684) ([Kruglov Pavel](https://github.com/Avogar)).
+ * Input/output `Avro` format will parse bool type as ClickHouse bool type. [#44684](https://github.com/ClickHouse/ClickHouse/pull/44684) ([Kruglov Pavel](https://github.com/Avogar)).
* Support Bool type in Arrow/Parquet/ORC. Closes [#43970](https://github.com/ClickHouse/ClickHouse/issues/43970). [#44698](https://github.com/ClickHouse/ClickHouse/pull/44698) ([Kruglov Pavel](https://github.com/Avogar)).
* Don't greedily parse beyond the quotes when reading UUIDs - it may lead to mistakenly successful parsing of incorrect data. [#44686](https://github.com/ClickHouse/ClickHouse/pull/44686) ([Raúl Marín](https://github.com/Algunenano)).
* Infer UInt64 in case of Int64 overflow and fix some transforms in schema inference. [#44696](https://github.com/ClickHouse/ClickHouse/pull/44696) ([Kruglov Pavel](https://github.com/Avogar)).
@@ -102,17 +102,6 @@ if (ENABLE_FUZZING)
    set (ENABLE_PROTOBUF 1)
endif()

- option (ENABLE_WOBOQ_CODEBROWSER "Build for woboq codebrowser" OFF)
-
- if (ENABLE_WOBOQ_CODEBROWSER)
-     set (ENABLE_EMBEDDED_COMPILER 0)
-     set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-poison-system-directories")
-     # woboq codebrowser uses clang tooling, and they could add default system
-     # clang includes, and later clang will warn for those added by itself
-     # includes.
-     set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-poison-system-directories")
- endif()

# Global libraries
# See:
# - default_libs.cmake
@@ -329,7 +318,16 @@ set (COMPILER_FLAGS "${COMPILER_FLAGS}")
# Our built-in unwinder only supports DWARF version up to 4.
set (DEBUG_INFO_FLAGS "-g -gdwarf-4")

- set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS}")
+ # Disable omit frame pointer compiler optimization using -fno-omit-frame-pointer
+ option(DISABLE_OMIT_FRAME_POINTER "Disable omit frame pointer compiler optimization" OFF)
+
+ if (DISABLE_OMIT_FRAME_POINTER)
+     set (CMAKE_CXX_FLAGS_ADD "${CMAKE_CXX_FLAGS_ADD} -fno-omit-frame-pointer")
+     set (CMAKE_C_FLAGS_ADD "${CMAKE_C_FLAGS_ADD} -fno-omit-frame-pointer")
+     set (CMAKE_ASM_FLAGS_ADD "${CMAKE_ASM_FLAGS_ADD} -fno-omit-frame-pointer")
+ endif()
+
+ set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
12 README.md
@@ -16,16 +16,20 @@ curl https://clickhouse.com/ | sh
* [YouTube channel](https://www.youtube.com/c/ClickHouseDB) has a lot of content about ClickHouse in video format.
* [Slack](https://clickhouse.com/slack) and [Telegram](https://telegram.me/clickhouse_en) allow chatting with ClickHouse users in real-time.
* [Blog](https://clickhouse.com/blog/) contains various ClickHouse-related articles, as well as announcements and reports about events.
- * [Code Browser (Woboq)](https://clickhouse.com/codebrowser/ClickHouse/index.html) with syntax highlighting and navigation.
* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlighting, powered by github.dev.
* [Static Analysis (SonarCloud)](https://sonarcloud.io/project/issues?resolved=false&id=ClickHouse_ClickHouse) proposes C++ quality improvements.
* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.

## Upcoming Events

- * [**v23.8 Community Call**](https://clickhouse.com/company/events/v23-8-community-release-call?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-08) - Aug 31 - 23.8 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
- * [**ClickHouse & AI - A Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/294472987) - Aug 8
- * [**ClickHouse Meetup in Paris**](https://www.meetup.com/clickhouse-france-user-group/events/294283460) - Sep 12
+ * [**v23.9 Community Call**]([https://clickhouse.com/company/events/v23-8-community-release-call](https://clickhouse.com/company/events/v23-9-community-release-call)?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-08) - Sep 28 - 23.9 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
+ * [**ClickHouse Meetup in Amsterdam**](https://www.meetup.com/clickhouse-netherlands-user-group/events/296334590/) - Oct 31
+ * [**ClickHouse Meetup in Beijing**](https://www.meetup.com/clickhouse-beijing-user-group/events/296334856/) - Nov 4
+ * [**ClickHouse Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/296334923/) - Nov 8
+ * [**ClickHouse Meetup in Singapore**](https://www.meetup.com/clickhouse-singapore-meetup-group/events/296334976/) - Nov 15
+ * [**ClickHouse Meetup in Berlin**](https://www.meetup.com/clickhouse-berlin-user-group/events/296488501/) - Nov 30
+ * [**ClickHouse Meetup in NYC**](https://www.meetup.com/clickhouse-new-york-user-group/events/296488779/) - Dec 11
+ * [**ClickHouse Meetup in Boston**](https://www.meetup.com/clickhouse-boston-user-group/events/296488840/) - Dec 12

Also, keep an eye out for upcoming meetups around the world. Somewhere else you want us to be? Please feel free to reach out to tyler <at> clickhouse <dot> com.
@@ -13,9 +13,10 @@ The following versions of ClickHouse server are currently being supported with s

| Version | Supported |
|:-|:-|
+ | 23.9 | ✔️ |
| 23.8 | ✔️ |
| 23.7 | ✔️ |
- | 23.6 | ✔️ |
+ | 23.6 | ❌ |
| 23.5 | ❌ |
| 23.4 | ❌ |
| 23.3 | ✔️ |
@@ -177,7 +177,7 @@ inline bool memequalWide(const char * p1, const char * p2, size_t size)
            return false;
    }

-     switch (size / 16)
+     switch (size / 16) // NOLINT(bugprone-switch-missing-default-case)
    {
        case 3: if (!compare8(p1 + 32, p2 + 32)) return false; [[fallthrough]];
        case 2: if (!compare8(p1 + 16, p2 + 16)) return false; [[fallthrough]];
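The `NOLINT(bugprone-switch-missing-default-case)` above marks the missing `default:` as intentional, presumably because larger sizes are handled before this switch, so `size / 16` never exceeds the highest `case`. A usage sketch (not part of the diff), assuming `memequalWide()` is in scope:

    #include <cstddef>

    bool buffers_equal(const char * a, const char * b, size_t n)
    {
        // Compares n bytes using wide 16-byte loads, per the switch shown above.
        return memequalWide(a, b, n);
    }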
@@ -185,6 +185,6 @@

/// A template function for suppressing warnings about unused variables or function results.
template <typename... Args>
- constexpr void UNUSED(Args &&... args [[maybe_unused]])
+ constexpr void UNUSED(Args &&... args [[maybe_unused]]) // NOLINT(cppcoreguidelines-missing-std-forward)
{
}
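A small usage sketch (not part of the diff), assuming `UNUSED()` as declared above; it accepts any number of values and discards them, so results kept only for debugging do not trigger unused-variable warnings:

    static int compute() { return 42; }  // stand-in for a real computation, illustration only

    void example()
    {
        int result = compute();
        // Silences -Wunused-variable in builds where the value is only inspected in a debugger.
        UNUSED(result);
    }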
@@ -289,3 +289,13 @@ inline void writeBinByte(UInt8 byte, void * out)
{
    memcpy(out, &impl::bin_byte_to_char_table[static_cast<size_t>(byte) * 8], 8);
}

+ /// Converts byte array to a hex string. Useful for debug logging.
+ inline std::string hexString(const void * data, size_t size)
+ {
+     const char * p = reinterpret_cast<const char *>(data);
+     std::string s(size * 2, '\0');
+     for (size_t i = 0; i < size; ++i)
+         writeHexByteLowercase(p[i], s.data() + i * 2);
+     return s;
+ }
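A usage sketch (not part of the diff), assuming the new `hexString()` above is in scope; every input byte becomes two lowercase hex characters:

    #include <iostream>
    #include <string>

    int main()
    {
        const unsigned char payload[] = {0xDE, 0xAD, 0xBE, 0xEF};
        // Prints "deadbeef": two lowercase hex characters per input byte.
        std::cout << hexString(payload, sizeof(payload)) << '\n';
        return 0;
    }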
@@ -20,14 +20,14 @@ Out & dumpValue(Out &, T &&);

/// Catch-all case.
template <int priority, typename Out, typename T>
- std::enable_if_t<priority == -1, Out> & dumpImpl(Out & out, T &&)
+ std::enable_if_t<priority == -1, Out> & dumpImpl(Out & out, T &&) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return out << "{...}";
}

/// An object, that could be output with operator <<.
template <int priority, typename Out, typename T>
- std::enable_if_t<priority == 0, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::declval<Out &>() << std::declval<T>())> * = nullptr)
+ std::enable_if_t<priority == 0, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::declval<Out &>() << std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return out << x;
}

@@ -37,7 +37,7 @@ template <int priority, typename Out, typename T>
std::enable_if_t<priority == 1
    /// Protect from the case when operator * do effectively nothing (function pointer).
    && !std::is_same_v<std::decay_t<T>, std::decay_t<decltype(*std::declval<T>())>>
-     , Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(*std::declval<T>())> * = nullptr)
+     , Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(*std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    if (!x)
        return out << "nullptr";

@@ -46,7 +46,7 @@ std::enable_if_t<priority == 1

/// Container.
template <int priority, typename Out, typename T>
- std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::begin(std::declval<T>()))> * = nullptr)
+ std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::begin(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    bool first = true;
    out << "{";

@@ -64,7 +64,7 @@ std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<

template <int priority, typename Out, typename T>
std::enable_if_t<priority == 3 && std::is_enum_v<std::decay_t<T>>, Out> &
- dumpImpl(Out & out, T && x)
+ dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return out << magic_enum::enum_name(x);
}

@@ -73,7 +73,7 @@ dumpImpl(Out & out, T && x)

template <int priority, typename Out, typename T>
std::enable_if_t<priority == 3 && (std::is_same_v<std::decay_t<T>, std::string> || std::is_same_v<std::decay_t<T>, const char *>), Out> &
- dumpImpl(Out & out, T && x)
+ dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return out << std::quoted(x);
}

@@ -82,7 +82,7 @@ dumpImpl(Out & out, T && x)

template <int priority, typename Out, typename T>
std::enable_if_t<priority == 3 && std::is_same_v<std::decay_t<T>, unsigned char>, Out> &
- dumpImpl(Out & out, T && x)
+ dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return out << int(x);
}

@@ -90,7 +90,7 @@ dumpImpl(Out & out, T && x)

/// Tuple, pair
template <size_t N, typename Out, typename T>
- Out & dumpTupleImpl(Out & out, T && x)
+ Out & dumpTupleImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    if constexpr (N == 0)
        out << "{";

@@ -108,14 +108,14 @@ Out & dumpTupleImpl(Out & out, T && x)
}

template <int priority, typename Out, typename T>
- std::enable_if_t<priority == 4, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::get<0>(std::declval<T>()))> * = nullptr)
+ std::enable_if_t<priority == 4, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::get<0>(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return dumpTupleImpl<0>(out, x);
}


template <int priority, typename Out, typename T>
- Out & dumpDispatchPriorities(Out & out, T && x, std::decay_t<decltype(dumpImpl<priority>(std::declval<Out &>(), std::declval<T>()))> *)
+ Out & dumpDispatchPriorities(Out & out, T && x, std::decay_t<decltype(dumpImpl<priority>(std::declval<Out &>(), std::declval<T>()))> *) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return dumpImpl<priority>(out, x);
}

@@ -124,21 +124,21 @@ Out & dumpDispatchPriorities(Out & out, T && x, std::decay_t<decltype(dumpImpl<p
struct LowPriority { LowPriority(void *) {} };

template <int priority, typename Out, typename T>
- Out & dumpDispatchPriorities(Out & out, T && x, LowPriority)
+ Out & dumpDispatchPriorities(Out & out, T && x, LowPriority) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return dumpDispatchPriorities<priority - 1>(out, x, nullptr);
}


template <typename Out, typename T>
- Out & dumpValue(Out & out, T && x)
+ Out & dumpValue(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    return dumpDispatchPriorities<5>(out, x, nullptr);
}


template <typename Out, typename T>
- Out & dump(Out & out, const char * name, T && x)
+ Out & dump(Out & out, const char * name, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    // Dumping string literal, printing name and demangled type is irrelevant.
    if constexpr (std::is_same_v<const char *, std::decay_t<std::remove_reference_t<T>>>)
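The repeated `// NOLINT(cppcoreguidelines-missing-std-forward)` annotations silence clang-tidy on forwarding-reference parameters that these dump helpers deliberately do not `std::forward`. A usage sketch (not part of the diff), assuming the declarations above are in scope:

    #include <iostream>
    #include <map>
    #include <string>

    void example()
    {
        std::map<std::string, int> counts{{"a", 1}, {"b", 2}};
        // dumpValue() walks dumpDispatchPriorities<5>() down to the container
        // overload (priority == 2) and prints the elements in braces.
        dumpValue(std::cerr, counts) << '\n';
    }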
@@ -9,9 +9,9 @@ class [[nodiscard]] BasicScopeGuard
{
public:
    constexpr BasicScopeGuard() = default;
-     constexpr BasicScopeGuard(BasicScopeGuard && src) : function{src.release()} {} // NOLINT(hicpp-noexcept-move, performance-noexcept-move-constructor)
+     constexpr BasicScopeGuard(BasicScopeGuard && src) : function{src.release()} {} // NOLINT(hicpp-noexcept-move, performance-noexcept-move-constructor, cppcoreguidelines-noexcept-move-operations)

-     constexpr BasicScopeGuard & operator=(BasicScopeGuard && src) // NOLINT(hicpp-noexcept-move, performance-noexcept-move-constructor)
+     constexpr BasicScopeGuard & operator=(BasicScopeGuard && src) // NOLINT(hicpp-noexcept-move, performance-noexcept-move-constructor, cppcoreguidelines-noexcept-move-operations)
    {
        if (this != &src)
        {

@@ -23,11 +23,11 @@ public:

    template <typename G>
    requires std::is_convertible_v<G, F>
-     constexpr BasicScopeGuard(BasicScopeGuard<G> && src) : function{src.release()} {} // NOLINT(google-explicit-constructor)
+     constexpr BasicScopeGuard(BasicScopeGuard<G> && src) : function{src.release()} {} // NOLINT(google-explicit-constructor, cppcoreguidelines-rvalue-reference-param-not-moved, cppcoreguidelines-noexcept-move-operations)

    template <typename G>
    requires std::is_convertible_v<G, F>
-     constexpr BasicScopeGuard & operator=(BasicScopeGuard<G> && src)
+     constexpr BasicScopeGuard & operator=(BasicScopeGuard<G> && src) // NOLINT(cppcoreguidelines-rvalue-reference-param-not-moved, cppcoreguidelines-noexcept-move-operations)
    {
        if (this != &src)
        {

@@ -43,7 +43,7 @@ public:

    template <typename G>
    requires std::is_convertible_v<G, F>
-     constexpr BasicScopeGuard(G && function_) : function{std::move(function_)} {} // NOLINT(google-explicit-constructor, bugprone-forwarding-reference-overload, bugprone-move-forwarding-reference)
+     constexpr BasicScopeGuard(G && function_) : function{std::move(function_)} {} // NOLINT(google-explicit-constructor, bugprone-forwarding-reference-overload, bugprone-move-forwarding-reference, cppcoreguidelines-missing-std-forward)

    ~BasicScopeGuard() { invoke(); }

@@ -70,7 +70,7 @@ public:

    template <typename G>
    requires std::is_convertible_v<G, F>
-     BasicScopeGuard<F> & join(BasicScopeGuard<G> && other)
+     BasicScopeGuard<F> & join(BasicScopeGuard<G> && other) // NOLINT(cppcoreguidelines-rvalue-reference-param-not-moved)
    {
        if (other.function)
        {
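A usage sketch (not part of the diff), assuming `BasicScopeGuard` as declared above: the converting constructor covered by the NOLINTs accepts any compatible callable, and the destructor invokes it when the scope is left, including on early return:

    #include <cstdio>
    #include <functional>

    void example()
    {
        std::FILE * f = std::fopen("/tmp/example.txt", "w");
        if (!f)
            return;
        // The guard's callable runs in ~BasicScopeGuard(), closing the file on any exit path.
        BasicScopeGuard<std::function<void()>> close_file([f] { std::fclose(f); });
        std::fputs("hello\n", f);
    }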
@@ -131,3 +131,29 @@ void sort(RandomIt first, RandomIt last)
    using comparator = std::less<value_type>;
    ::sort(first, last, comparator());
}

+ /** Try to fast sort elements for common sorting patterns:
+   * 1. If elements are already sorted.
+   * 2. If elements are already almost sorted.
+   * 3. If elements are already sorted in reverse order.
+   *
+   * Returns true if fast sort was performed or elements were already sorted, false otherwise.
+   */
+ template <typename RandomIt, typename Compare>
+ bool trySort(RandomIt first, RandomIt last, Compare compare)
+ {
+ #ifndef NDEBUG
+     ::shuffle(first, last);
+ #endif
+
+     ComparatorWrapper<Compare> compare_wrapper = compare;
+     return ::pdqsort_try_sort(first, last, compare_wrapper);
+ }
+
+ template <typename RandomIt>
+ bool trySort(RandomIt first, RandomIt last)
+ {
+     using value_type = typename std::iterator_traits<RandomIt>::value_type;
+     using comparator = std::less<value_type>;
+     return ::trySort(first, last, comparator());
+ }
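A usage sketch (not part of the diff), assuming `::sort` and `::trySort` from the hunk above are in scope; `trySort` only handles the already-sorted, almost-sorted, and reverse-sorted fast paths, so a caller falls back to the full sort when it returns false:

    #include <vector>

    void example()
    {
        std::vector<int> values{1, 2, 3, 5, 4};
        // trySort() returns false when the input does not match one of the
        // fast patterns documented above; fall back to the full sort then.
        if (!::trySort(values.begin(), values.end()))
            ::sort(values.begin(), values.end());
    }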
@@ -23,10 +23,10 @@ public:
    constexpr StrongTypedef(): t() {}

    constexpr StrongTypedef(const Self &) = default;
-     constexpr StrongTypedef(Self &&) noexcept(std::is_nothrow_move_constructible_v<T>) = default;
+     constexpr StrongTypedef(Self &&) noexcept(std::is_nothrow_move_constructible_v<T>) = default; // NOLINT(cppcoreguidelines-noexcept-move-operations, hicpp-noexcept-move, performance-noexcept-move-constructor)

    Self & operator=(const Self &) = default;
-     Self & operator=(Self &&) noexcept(std::is_nothrow_move_assignable_v<T>)= default;
+     Self & operator=(Self &&) noexcept(std::is_nothrow_move_assignable_v<T>)= default; // NOLINT(cppcoreguidelines-noexcept-move-operations, hicpp-noexcept-move, performance-noexcept-move-constructor)

    template <class Enable = typename std::is_copy_assignable<T>::type>
    Self & operator=(const T & rhs) { t = rhs; return *this;}
@@ -463,7 +463,7 @@ auto bounded_rand(RngType& rng, typename RngType::result_type upper_bound)
}

template <typename Iter, typename RandType>
- void shuffle(Iter from, Iter to, RandType&& rng)
+ void shuffle(Iter from, Iter to, RandType&& rng) // NOLINT(cppcoreguidelines-missing-std-forward)
{
    typedef typename std::iterator_traits<Iter>::difference_type delta_t;
    typedef typename std::remove_reference<RandType>::type::result_type result_t;
@@ -2,11 +2,11 @@

# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
- SET(VERSION_REVISION 54478)
+ SET(VERSION_REVISION 54479)
SET(VERSION_MAJOR 23)
- SET(VERSION_MINOR 9)
+ SET(VERSION_MINOR 10)
SET(VERSION_PATCH 1)
- SET(VERSION_GITHASH ebc7d9a9f3b40be89e0b3e738b35d394aabeea3e)
+ SET(VERSION_GITHASH 8f9a227de1f530cdbda52c145d41a6b0f1d29961)
- SET(VERSION_DESCRIBE v23.9.1.1-testing)
+ SET(VERSION_DESCRIBE v23.10.1.1-testing)
- SET(VERSION_STRING 23.9.1.1)
+ SET(VERSION_STRING 23.10.1.1)
# end of autochange
@@ -12,7 +12,7 @@ endif()
set(COMPILER_CACHE "auto" CACHE STRING "Speedup re-compilations using the caching tools; valid options are 'auto' (ccache, then sccache), 'ccache', 'sccache', or 'disabled'")

if(COMPILER_CACHE STREQUAL "auto")
-     find_program (CCACHE_EXECUTABLE ccache sccache)
+     find_program (CCACHE_EXECUTABLE NAMES ccache sccache)
elseif (COMPILER_CACHE STREQUAL "ccache")
    find_program (CCACHE_EXECUTABLE ccache)
elseif(COMPILER_CACHE STREQUAL "sccache")
@@ -5,14 +5,14 @@ if (ENABLE_CLANG_TIDY)

    find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
    if (CLANG_TIDY_CACHE_PATH)
-         find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-16" "clang-tidy-15" "clang-tidy-14" "clang-tidy")
+         find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-17" "clang-tidy-16" "clang-tidy")

        # Why do we use ';' here?
        # It's a cmake black magic: https://cmake.org/cmake/help/latest/prop_tgt/LANG_CLANG_TIDY.html#prop_tgt:%3CLANG%3E_CLANG_TIDY
        # The CLANG_TIDY_PATH is passed to CMAKE_CXX_CLANG_TIDY, which follows CXX_CLANG_TIDY syntax.
        set (CLANG_TIDY_PATH "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_PATH}" CACHE STRING "A combined command to run clang-tidy with caching wrapper")
    else ()
-         find_program (CLANG_TIDY_PATH NAMES "clang-tidy-16" "clang-tidy-15" "clang-tidy-14" "clang-tidy")
+         find_program (CLANG_TIDY_PATH NAMES "clang-tidy-17" "clang-tidy-16" "clang-tidy")
    endif ()

    if (CLANG_TIDY_PATH)
@@ -1,17 +0,0 @@
- #!/usr/bin/env bash
-
- # This is a workaround for bug in llvm/clang,
- # that does not produce .debug_aranges with LTO
- #
- # NOTE: this is a temporary solution, that should be removed after upgrading to
- # clang-16/llvm-16.
- #
- # Refs: https://reviews.llvm.org/D133092
-
- # NOTE: only -flto=thin is supported.
- # NOTE: it is not possible to check was there -gdwarf-aranges initially or not.
- if [[ "$*" =~ -plugin-opt=thinlto ]]; then
-     exec "@LLD_PATH@" -plugin-opt=-generate-arange-section "$@"
- else
-     exec "@LLD_PATH@" "$@"
- fi
@@ -14,15 +14,6 @@ set (SAN_FLAGS "${SAN_FLAGS} -g -fno-omit-frame-pointer -DSANITIZER")
if (SANITIZE)
    if (SANITIZE STREQUAL "address")
        set (ASAN_FLAGS "-fsanitize=address -fsanitize-address-use-after-scope")
-         if (COMPILER_CLANG)
-             if (${CMAKE_CXX_COMPILER_VERSION} VERSION_GREATER_EQUAL 15 AND ${CMAKE_CXX_COMPILER_VERSION} VERSION_LESS 16)
-                 # LLVM-15 has a bug in Address Sanitizer, preventing the usage
-                 # of 'sanitize-address-use-after-scope', see [1].
-                 #
-                 # [1]: https://github.com/llvm/llvm-project/issues/58633
-                 set (ASAN_FLAGS "${ASAN_FLAGS} -fno-sanitize-address-use-after-scope")
-             endif()
-         endif()
        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} ${ASAN_FLAGS}")
        set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} ${ASAN_FLAGS}")
@ -13,7 +13,7 @@ execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE COMPILER
 message (STATUS "Using compiler:\n${COMPILER_SELF_IDENTIFICATION}")

 # Require minimum compiler versions
-set (CLANG_MINIMUM_VERSION 15)
+set (CLANG_MINIMUM_VERSION 16)
 set (XCODE_MINIMUM_VERSION 12.0)
 set (APPLE_CLANG_MINIMUM_VERSION 12.0.0)

@ -49,14 +49,14 @@ endif ()

 if (NOT LINKER_NAME)
     if (COMPILER_CLANG)
-        if (OS_LINUX)
-            if (NOT ARCH_S390X) # s390x doesnt support lld
-                find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "ld.lld")
-            endif ()
+        if (OS_LINUX AND NOT ARCH_S390X)
+            find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "ld.lld")
+        elseif (OS_DARWIN)
+            find_program (LLD_PATH NAMES "ld")
         endif ()
     endif ()
-    if (OS_LINUX)
-        if (LLD_PATH)
+    if (LLD_PATH)
+        if (OS_LINUX OR OS_DARWIN)
             if (COMPILER_CLANG)
                 # Clang driver simply allows full linker path.
                 set (LINKER_NAME ${LLD_PATH})
@ -70,23 +70,16 @@ if (LINKER_NAME)
     if (NOT LLD_PATH)
         message (FATAL_ERROR "Using linker ${LINKER_NAME} but can't find its path.")
     endif ()
-    # This a temporary quirk to emit .debug_aranges with ThinLTO, it is only the case clang/llvm <16
-    if (COMPILER_CLANG AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 16)
-        set (LLD_WRAPPER "${CMAKE_CURRENT_BINARY_DIR}/ld.lld")
-        configure_file ("${CMAKE_CURRENT_SOURCE_DIR}/cmake/ld.lld.in" "${LLD_WRAPPER}" @ONLY)
-
-        set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --ld-path=${LLD_WRAPPER}")
-    else ()
-        set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --ld-path=${LLD_PATH}")
-    endif()
-
+    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --ld-path=${LLD_PATH}")
 endif ()

 if (LINKER_NAME)
     message(STATUS "Using linker: ${LINKER_NAME}")
-else()
+elseif (NOT ARCH_S390X AND NOT OS_FREEBSD)
+    message (FATAL_ERROR "The only supported linker is LLVM's LLD, but we cannot find it.")
+else ()
     message(STATUS "Using linker: <default>")
-endif()
+endif ()

 # Archiver

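The net effect of the two hunks above is that ld.lld is located automatically and handed to the clang driver via --ld-path, and the old ld.lld.in wrapper is no longer configured. Because the detection block is guarded by if (NOT LINKER_NAME), a specific linker can presumably still be forced by seeding the cache variable up front; a minimal sketch (the concrete linker name is hypothetical):

    # hypothetical override, e.g.  cmake -DLINKER_NAME=ld.lld-17 ...
    set (LINKER_NAME "ld.lld-17" CACHE STRING "Linker to use")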
2 contrib/CMakeLists.txt vendored
@ -212,6 +212,8 @@ add_contrib (libbcrypt-cmake libbcrypt)
 add_contrib (google-benchmark-cmake google-benchmark)
 add_contrib (ulid-c-cmake ulid-c)

+add_contrib (libssh-cmake libssh)
+
 # Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
 # Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
 # in "contrib/..." as originally planned, so we workaround this by fixing FOLDER properties of all targets manually,
2 contrib/croaring vendored
@ -1 +1 @@
-Subproject commit f40ed52bcdd635840a79877cef4857315dba817c
+Subproject commit e4a7ad5542746103e71ca8b5e56225baf0014c87
@ -2,23 +2,25 @@ set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/croaring")

 set(SRCS
     "${LIBRARY_DIR}/src/array_util.c"
+    "${LIBRARY_DIR}/src/bitset.c"
     "${LIBRARY_DIR}/src/bitset_util.c"
+    "${LIBRARY_DIR}/src/isadetection.c"
+    "${LIBRARY_DIR}/src/memory.c"
+    "${LIBRARY_DIR}/src/roaring.c"
+    "${LIBRARY_DIR}/src/roaring_array.c"
+    "${LIBRARY_DIR}/src/roaring_priority_queue.c"
     "${LIBRARY_DIR}/src/containers/array.c"
     "${LIBRARY_DIR}/src/containers/bitset.c"
     "${LIBRARY_DIR}/src/containers/containers.c"
     "${LIBRARY_DIR}/src/containers/convert.c"
-    "${LIBRARY_DIR}/src/containers/mixed_intersection.c"
-    "${LIBRARY_DIR}/src/containers/mixed_union.c"
-    "${LIBRARY_DIR}/src/containers/mixed_equal.c"
-    "${LIBRARY_DIR}/src/containers/mixed_subset.c"
-    "${LIBRARY_DIR}/src/containers/mixed_negation.c"
-    "${LIBRARY_DIR}/src/containers/mixed_xor.c"
     "${LIBRARY_DIR}/src/containers/mixed_andnot.c"
-    "${LIBRARY_DIR}/src/containers/run.c"
-    "${LIBRARY_DIR}/src/roaring.c"
-    "${LIBRARY_DIR}/src/roaring_priority_queue.c"
-    "${LIBRARY_DIR}/src/roaring_array.c"
-    "${LIBRARY_DIR}/src/memory.c")
+    "${LIBRARY_DIR}/src/containers/mixed_equal.c"
+    "${LIBRARY_DIR}/src/containers/mixed_intersection.c"
+    "${LIBRARY_DIR}/src/containers/mixed_negation.c"
+    "${LIBRARY_DIR}/src/containers/mixed_subset.c"
+    "${LIBRARY_DIR}/src/containers/mixed_union.c"
+    "${LIBRARY_DIR}/src/containers/mixed_xor.c"
+    "${LIBRARY_DIR}/src/containers/run.c")

 add_library(_roaring ${SRCS})

2 contrib/googletest vendored
@ -1 +1 @@
-Subproject commit 71140c3ca7a87bb1b5b9c9f1500fea8858cce344
+Subproject commit e47544ad31cb3ceecd04cc13e8fe556f8df9fe0b
1 contrib/libssh vendored Submodule
@ -0,0 +1 @@
+Subproject commit 2c76332ef56d90f55965ab24da6b6dbcbef29c4c
74 contrib/libssh-cmake/CMakeLists.txt Normal file
@ -0,0 +1,74 @@
set(LIB_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libssh")
set(LIB_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/libssh")
# Specify search path for CMake modules to be loaded by include()
# and find_package()
list(APPEND CMAKE_MODULE_PATH "${LIB_SOURCE_DIR}/cmake/Modules")

include(DefineCMakeDefaults)
include(DefineCompilerFlags)

project(libssh VERSION 0.9.7 LANGUAGES C)

# global needed variable
set(APPLICATION_NAME ${PROJECT_NAME})

# SOVERSION scheme: CURRENT.AGE.REVISION
#   If there was an incompatible interface change:
#     Increment CURRENT. Set AGE and REVISION to 0
#   If there was a compatible interface change:
#     Increment AGE. Set REVISION to 0
#   If the source code was changed, but there were no interface changes:
#     Increment REVISION.
set(LIBRARY_VERSION "4.8.7")
set(LIBRARY_SOVERSION "4")

# where to look first for cmake modules, before ${CMAKE_ROOT}/Modules/ is checked

# add definitions

include(DefinePlatformDefaults)

# Copy library files to a lib sub-directory
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${LIB_BINARY_DIR}/lib")

set(CMAKE_THREAD_PREFER_PTHREADS ON)
set(THREADS_PREFER_PTHREAD_FLAG ON)

set(WITH_ZLIB OFF)
set(WITH_SYMBOL_VERSIONING OFF)
set(WITH_SERVER ON)

include(IncludeSources.cmake)
if (OS_LINUX)
    if (ARCH_AMD64)
        if (USE_MUSL)
            target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/x86-64-musl")
        else()
            target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/x86-64")
        endif ()
    elseif (ARCH_AARCH64)
        if (USE_MUSL)
            target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/aarch64-musl")
        else()
            target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/aarch64")
        endif ()
    elseif (ARCH_PPC64LE)
        target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/ppc64le")
    elseif (ARCH_S390X)
        target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/s390x")
    elseif (ARCH_RISCV64)
        target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/riscv64")
    else ()
        message(FATAL_ERROR "Platform is not supported")
    endif ()
elseif (OS_DARWIN)
    target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/darwin")
elseif (OS_FREEBSD)
    target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/freebsd")
else ()
    message(FATAL_ERROR "Platform is not supported")
endif()

configure_file(${LIB_SOURCE_DIR}/include/libssh/libssh_version.h.cmake
    ${LIB_BINARY_DIR}/include/libssh/libssh_version.h
    @ONLY)
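As a worked example of the SOVERSION scheme described in the comment above: starting from LIBRARY_VERSION 4.8.7 (CURRENT.AGE.REVISION) and SOVERSION 4, an incompatible interface change would bump CURRENT and reset the rest, while a compatible one would only bump AGE. A sketch, not part of the change itself:

    # after an incompatible interface change:
    set(LIBRARY_VERSION "5.0.0")
    set(LIBRARY_SOVERSION "5")
    # after a compatible interface change only:
    set(LIBRARY_VERSION "4.9.0")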
140 contrib/libssh-cmake/IncludeSources.cmake Normal file
@ -0,0 +1,140 @@
set(LIBSSH_LINK_LIBRARIES
    ${LIBSSH_REQUIRED_LIBRARIES}
)


set(LIBSSH_LINK_LIBRARIES
    ${LIBSSH_LINK_LIBRARIES}
    OpenSSL::Crypto
)

if (MINGW AND Threads_FOUND)
    set(LIBSSH_LINK_LIBRARIES
        ${LIBSSH_LINK_LIBRARIES}
        Threads::Threads
    )
endif()

set(libssh_SRCS
    ${LIB_SOURCE_DIR}/src/agent.c
    ${LIB_SOURCE_DIR}/src/auth.c
    ${LIB_SOURCE_DIR}/src/base64.c
    ${LIB_SOURCE_DIR}/src/bignum.c
    ${LIB_SOURCE_DIR}/src/buffer.c
    ${LIB_SOURCE_DIR}/src/callbacks.c
    ${LIB_SOURCE_DIR}/src/channels.c
    ${LIB_SOURCE_DIR}/src/client.c
    ${LIB_SOURCE_DIR}/src/config.c
    ${LIB_SOURCE_DIR}/src/connect.c
    ${LIB_SOURCE_DIR}/src/connector.c
    ${LIB_SOURCE_DIR}/src/curve25519.c
    ${LIB_SOURCE_DIR}/src/dh.c
    ${LIB_SOURCE_DIR}/src/ecdh.c
    ${LIB_SOURCE_DIR}/src/error.c
    ${LIB_SOURCE_DIR}/src/getpass.c
    ${LIB_SOURCE_DIR}/src/init.c
    ${LIB_SOURCE_DIR}/src/kdf.c
    ${LIB_SOURCE_DIR}/src/kex.c
    ${LIB_SOURCE_DIR}/src/known_hosts.c
    ${LIB_SOURCE_DIR}/src/knownhosts.c
    ${LIB_SOURCE_DIR}/src/legacy.c
    ${LIB_SOURCE_DIR}/src/log.c
    ${LIB_SOURCE_DIR}/src/match.c
    ${LIB_SOURCE_DIR}/src/messages.c
    ${LIB_SOURCE_DIR}/src/misc.c
    ${LIB_SOURCE_DIR}/src/options.c
    ${LIB_SOURCE_DIR}/src/packet.c
    ${LIB_SOURCE_DIR}/src/packet_cb.c
    ${LIB_SOURCE_DIR}/src/packet_crypt.c
    ${LIB_SOURCE_DIR}/src/pcap.c
    ${LIB_SOURCE_DIR}/src/pki.c
    ${LIB_SOURCE_DIR}/src/pki_container_openssh.c
    ${LIB_SOURCE_DIR}/src/poll.c
    ${LIB_SOURCE_DIR}/src/session.c
    ${LIB_SOURCE_DIR}/src/scp.c
    ${LIB_SOURCE_DIR}/src/socket.c
    ${LIB_SOURCE_DIR}/src/string.c
    ${LIB_SOURCE_DIR}/src/threads.c
    ${LIB_SOURCE_DIR}/src/wrapper.c
    ${LIB_SOURCE_DIR}/src/external/bcrypt_pbkdf.c
    ${LIB_SOURCE_DIR}/src/external/blowfish.c
    ${LIB_SOURCE_DIR}/src/external/chacha.c
    ${LIB_SOURCE_DIR}/src/external/poly1305.c
    ${LIB_SOURCE_DIR}/src/chachapoly.c
    ${LIB_SOURCE_DIR}/src/config_parser.c
    ${LIB_SOURCE_DIR}/src/token.c
    ${LIB_SOURCE_DIR}/src/pki_ed25519_common.c
)

if (DEFAULT_C_NO_DEPRECATION_FLAGS)
    set_source_files_properties(known_hosts.c
        PROPERTIES
        COMPILE_FLAGS ${DEFAULT_C_NO_DEPRECATION_FLAGS})
endif()

if (CMAKE_USE_PTHREADS_INIT)
    set(libssh_SRCS
        ${libssh_SRCS}
        ${LIB_SOURCE_DIR}/src/threads/noop.c
        ${LIB_SOURCE_DIR}/src/threads/pthread.c
    )
elseif (CMAKE_USE_WIN32_THREADS_INIT)
    set(libssh_SRCS
        ${libssh_SRCS}
        ${LIB_SOURCE_DIR}/src/threads/noop.c
        ${LIB_SOURCE_DIR}/src/threads/winlocks.c
    )
else()
    set(libssh_SRCS
        ${libssh_SRCS}
        ${LIB_SOURCE_DIR}/src/threads/noop.c
    )
endif()

# LIBCRYPT specific
set(libssh_SRCS
    ${libssh_SRCS}
    ${LIB_SOURCE_DIR}/src/threads/libcrypto.c
    ${LIB_SOURCE_DIR}/src/pki_crypto.c
    ${LIB_SOURCE_DIR}/src/ecdh_crypto.c
    ${LIB_SOURCE_DIR}/src/libcrypto.c
    ${LIB_SOURCE_DIR}/src/dh_crypto.c
)

if (NOT (ENABLE_OPENSSL OR ENABLE_OPENSSL_DYNAMIC))
    add_compile_definitions(USE_BORINGSSL=1)
endif()

set(libssh_SRCS
    ${libssh_SRCS}
    ${LIB_SOURCE_DIR}/src/options.c
    ${LIB_SOURCE_DIR}/src/server.c
    ${LIB_SOURCE_DIR}/src/bind.c
    ${LIB_SOURCE_DIR}/src/bind_config.c
)


add_library(_ssh STATIC ${libssh_SRCS})

target_include_directories(_ssh PRIVATE ${LIB_BINARY_DIR})
target_include_directories(_ssh PUBLIC "${LIB_SOURCE_DIR}/include" "${LIB_BINARY_DIR}/include")
target_link_libraries(_ssh
    PRIVATE ${LIBSSH_LINK_LIBRARIES})

add_library(ch_contrib::ssh ALIAS _ssh)

target_compile_options(_ssh
    PRIVATE
    ${DEFAULT_C_COMPILE_FLAGS}
    -D_GNU_SOURCE)


set_target_properties(_ssh
    PROPERTIES
    VERSION
    ${LIBRARY_VERSION}
    SOVERSION
    ${LIBRARY_SOVERSION}
    DEFINE_SYMBOL
    LIBSSH_EXPORTS
)
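The ch_contrib::ssh alias defined above is what the rest of the build is expected to link against; a minimal sketch of a consumer target (the target name and source file are hypothetical):

    # hypothetical consumer; the alias comes from IncludeSources.cmake above
    add_executable (ssh_smoke_test ssh_smoke_test.cpp)
    target_link_libraries (ssh_smoke_test PRIVATE ch_contrib::ssh)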
287 contrib/libssh-cmake/darwin/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"

/* Version number of package */
#define VERSION "0.9.7"

#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/darwin"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"

/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"

/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"

/************************** HEADER FILES *************************/

/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */

/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1

/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1

/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */

/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */

/* Define to 1 if you have the <utmp.h> header file. */
#define HAVE_UTMP_H 1

/* Define to 1 if you have the <util.h> header file. */
#define HAVE_UTIL_H 1

/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */

/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1

/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */

/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */

/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1

/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1

/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1

/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1

/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */

/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */

/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1

/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1

/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1

/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1

/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1

/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1

/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */

/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1

/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */

/* Define to 1 if you have gl_flags as a glob_t sturct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1

/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1

/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1

/*************************** FUNCTIONS ***************************/

/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1

/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1

/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */

/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1

/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1

/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1

/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */

/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif

/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1

/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1

/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */

/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1

/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */

/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */

/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1

/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */

/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */

/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1

/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1

/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1

/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */

/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1

/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1

/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1

/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */

/* Define to 1 if you have the `ntohll' function. */
#define HAVE_NTOHLL 1

/* Define to 1 if you have the `htonll' function. */
#define HAVE_HTONLL 1

/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1

/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */

/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */

/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1

/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO */

/* Define to 1 if you have the `memset_s' function. */
#define HAVE_MEMSET_S 1

/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */

/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */

/*************************** LIBRARIES ***************************/

/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1

/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */

/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */

/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1

/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */

/**************************** OPTIONS ****************************/

#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */

#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1

#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1

#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1

#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1

/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */

/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */

/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */

/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */

/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1

/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */

/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */

/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */

/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */

/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */

/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */

/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */

/*************************** ENDIAN *****************************/

/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
287 contrib/libssh-cmake/freebsd/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"

/* Version number of package */
#define VERSION "0.9.7"

#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/freebsd"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"

/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"

/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"

/************************** HEADER FILES *************************/

/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */

/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1

/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1

/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */

/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */

/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */

/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */

/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */

/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1

/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */

/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */

/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1

/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1

/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1

/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1

/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */

/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */

/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1

/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1

/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1

/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1

/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1

/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1

/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */

/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1

/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */

/* Define to 1 if you have gl_flags as a glob_t sturct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1

/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1

/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1

/*************************** FUNCTIONS ***************************/

/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1

/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1

/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */

/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1

/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1

/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1

/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */

/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif

/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1

/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1

/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */

/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1

/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */

/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */

/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1

/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */

/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */

/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1

/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1

/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1

/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */

/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1

/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1

/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1

/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */

/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */

/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */

/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1

/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */

/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */

/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1

/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1

/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */

/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */

/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */

/*************************** LIBRARIES ***************************/

/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1

/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */

/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */

/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1

/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */

/**************************** OPTIONS ****************************/

#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */

#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1

#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1

#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1

#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1

/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */

/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */

/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */

/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */

/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1

/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */

/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */

/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */

/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */

/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */

/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */

/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */

/*************************** ENDIAN *****************************/

/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
287 contrib/libssh-cmake/linux/aarch64-musl/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"

/* Version number of package */
#define VERSION "0.9.7"

#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/aarch64-musl"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"

/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"

/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"

/************************** HEADER FILES *************************/

/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */

/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1

/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1

/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */

/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */

/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */

/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */

/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */

/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1

/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */

/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */

/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1

/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1

/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1

/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1

/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */

/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */

/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1

/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1

/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1

/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1

/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1

/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1

/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */

/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1

/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */

/* Define to 1 if you have gl_flags as a glob_t sturct member */
/* #undef HAVE_GLOB_GL_FLAGS_MEMBER */

/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1

/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1

/*************************** FUNCTIONS ***************************/

/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1

/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1

/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */

/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1

/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1

/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1

/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */

/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif

/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1

/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1

/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */

/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1

/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */

/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */

/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1

/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */

/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */

/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1

/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1

/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1

/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */

/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1

/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1

/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1

/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */

/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */

/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */

/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1

/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */

/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */

/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1

/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1

/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */

/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */

/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */

/*************************** LIBRARIES ***************************/

/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1

/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */

/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */

/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1

/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */

/**************************** OPTIONS ****************************/

#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */

#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1

#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1

#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1

#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1

/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */

/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */

/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */

/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */

/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1

/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */

/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */

/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */

/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */

/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */

/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */

/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */

/*************************** ENDIAN *****************************/

/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
287 contrib/libssh-cmake/linux/aarch64/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"

/* Version number of package */
#define VERSION "0.9.7"

#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/aarch64"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"

/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"

/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"

/************************** HEADER FILES *************************/

/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */

/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1

/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1

/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */

/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */

/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */

/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */

/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */

/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1

/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */

/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */

/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1

/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1

/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1

/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1

/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */

/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */

/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1

/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1

/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1

/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1

/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1

/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1

/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */

/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1

/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */

/* Define to 1 if you have gl_flags as a glob_t sturct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1

/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1

/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1

/*************************** FUNCTIONS ***************************/

/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1

/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1

/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */

/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1

/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1

/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1

/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */

/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif

/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1

/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1

/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */

/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1

/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */

/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */

/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1

/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */

/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */

/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1

/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1

/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1

/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */

/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1

/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1

/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1

/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */

/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */

/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */

/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1

/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */

/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */

/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1

/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1

/* Define to 1 if you have the `memset_s' function. */
#define HAVE_MEMSET_S 1

/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */

/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */

/*************************** LIBRARIES ***************************/

/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1

/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */

/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `pthread' library (-lpthread). */
|
||||||
|
#define HAVE_PTHREAD 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
|
||||||
|
/* #undef HAVE_CMOCKA */
|
||||||
|
|
||||||
|
/**************************** OPTIONS ****************************/
|
||||||
|
|
||||||
|
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
|
||||||
|
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
|
||||||
|
|
||||||
|
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
|
||||||
|
#define HAVE_UNUSED_ATTRIBUTE 1
|
||||||
|
|
||||||
|
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
|
||||||
|
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
|
||||||
|
|
||||||
|
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
|
||||||
|
|
||||||
|
#define HAVE_COMPILER__FUNC__ 1
|
||||||
|
#define HAVE_COMPILER__FUNCTION__ 1
|
||||||
|
|
||||||
|
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable GSSAPI */
|
||||||
|
/* #undef WITH_GSSAPI */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable ZLIB */
|
||||||
|
/* #undef WITH_ZLIB */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable SFTP */
|
||||||
|
/* #undef WITH_SFTP */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable server support */
|
||||||
|
#define WITH_SERVER 1
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable DH group exchange algorithms */
|
||||||
|
/* #undef WITH_GEX */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable blowfish cipher support */
|
||||||
|
/* #undef WITH_BLOWFISH_CIPHER */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable debug output for crypto functions */
|
||||||
|
/* #undef DEBUG_CRYPTO */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable debug output for packet functions */
|
||||||
|
/* #undef DEBUG_PACKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable pcap output support (experimental) */
|
||||||
|
/* #undef WITH_PCAP */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable calltrace debug output */
|
||||||
|
/* #undef DEBUG_CALLTRACE */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable NaCl support */
|
||||||
|
/* #undef WITH_NACL */
|
||||||
|
|
||||||
|
/*************************** ENDIAN *****************************/
|
||||||
|
|
||||||
|
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
|
||||||
|
significant byte first (like Motorola and SPARC, unlike Intel). */
|
||||||
|
/* #undef WORDS_BIGENDIAN */
|
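The per-platform headers added below differ mainly in which feature macros end up set (for example, explicit_bzero/memset_s availability and WORDS_BIGENDIAN). As a hedged illustration of how a consumer typically branches on these HAVE_* macros, here is a minimal C sketch; the helper name secure_wipe is hypothetical and is not taken from libssh or from this commit.

/* Illustrative sketch only (not part of the committed files): selecting a
 * memory-wiping primitive based on macros from a generated config.h. */
#include "config.h"
#include <stddef.h>
#include <string.h>

static void secure_wipe(void *buf, size_t len)
{
#if defined(HAVE_EXPLICIT_BZERO)
    explicit_bzero(buf, len);        /* available on glibc and recent musl */
#elif defined(HAVE_MEMSET_S)
    memset_s(buf, len, 0, len);      /* C11 Annex K variant */
#else
    /* Fallback: write through a volatile pointer so the stores are not
     * optimized away (cf. HAVE_GCC_VOLATILE_MEMORY_PROTECTION above). */
    volatile unsigned char *p = (volatile unsigned char *)buf;
    while (len--)
        *p++ = 0;
#endif
}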
287 contrib/libssh-cmake/linux/ppc64le/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/ppc64le"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t sturct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO 1 */
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
287 contrib/libssh-cmake/linux/riscv64/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/riscv64"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t sturct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO 1 */
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
287 contrib/libssh-cmake/linux/s390x/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/s390x"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t sturct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO 1 */
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
#define WORDS_BIGENDIAN 1
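Note that s390x is the only platform in this commit that sets WORDS_BIGENDIAN, and none of these configs report ntohll/htonll. As a hedged illustration of how a 64-bit byte-order helper is typically synthesized from exactly these macros, here is a minimal C sketch; the function name htonll_sketch is hypothetical and not taken from libssh or this commit.

/* Illustrative sketch only: deriving 64-bit host-to-network conversion
 * from WORDS_BIGENDIAN / HAVE_HTONLL in a generated config.h. */
#include "config.h"
#include <stdint.h>
#include <arpa/inet.h>   /* htonl(); HAVE_ARPA_INET_H is set in these configs */

#ifndef HAVE_HTONLL
static uint64_t htonll_sketch(uint64_t v)
{
#ifdef WORDS_BIGENDIAN
    return v;            /* big-endian hosts (e.g. s390x) need no swap */
#else
    return ((uint64_t)htonl((uint32_t)(v & 0xffffffffu)) << 32)
         | (uint64_t)htonl((uint32_t)(v >> 32));
#endif
}
#endif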
287 contrib/libssh-cmake/linux/x86-64-musl/config.h Normal file
@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/musl"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <aprpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have eliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have eliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have eliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t sturct member */
/* #undef HAVE_GLOB_GL_FLAGS_MEMBER */
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
   significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
287 contrib/libssh-cmake/linux/x86-64/config.h Normal file
@ -0,0 +1,287 @@
|
|||||||
|
/* Name of package */
|
||||||
|
#define PACKAGE "libssh"
|
||||||
|
|
||||||
|
/* Version number of package */
|
||||||
|
#define VERSION "0.9.7"
|
||||||
|
|
||||||
|
#define SYSCONFDIR "etc"
|
||||||
|
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/Debug"
|
||||||
|
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
|
||||||
|
|
||||||
|
/* Global bind configuration file path */
|
||||||
|
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
|
||||||
|
|
||||||
|
/* Global client configuration file path */
|
||||||
|
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
|
||||||
|
|
||||||
|
/************************** HEADER FILES *************************/
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <argp.h> header file. */
|
||||||
|
/* #undef HAVE_ARGP_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <aprpa/inet.h> header file. */
|
||||||
|
#define HAVE_ARPA_INET_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <glob.h> header file. */
|
||||||
|
#define HAVE_GLOB_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
|
||||||
|
/* #undef HAVE_VALGRIND_VALGRIND_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <pty.h> header file. */
|
||||||
|
/* #undef HAVE_PTY_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <utmp.h> header file. */
|
||||||
|
/* #undef HAVE_UTMP_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <util.h> header file. */
|
||||||
|
/* #undef HAVE_UTIL_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <libutil.h> header file. */
|
||||||
|
/* #undef HAVE_LIBUTIL_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/time.h> header file. */
|
||||||
|
#define HAVE_SYS_TIME_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/utime.h> header file. */
|
||||||
|
/* #undef HAVE_SYS_UTIME_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <io.h> header file. */
|
||||||
|
/* #undef HAVE_IO_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <termios.h> header file. */
|
||||||
|
#define HAVE_TERMIOS_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <unistd.h> header file. */
|
||||||
|
#define HAVE_UNISTD_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdint.h> header file. */
|
||||||
|
#define HAVE_STDINT_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <openssl/aes.h> header file. */
|
||||||
|
#define HAVE_OPENSSL_AES_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <wspiapi.h> header file. */
|
||||||
|
/* #undef HAVE_WSPIAPI_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
|
||||||
|
/* #undef HAVE_OPENSSL_BLOWFISH_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <openssl/des.h> header file. */
|
||||||
|
#define HAVE_OPENSSL_DES_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
|
||||||
|
#define HAVE_OPENSSL_ECDH_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <openssl/ec.h> header file. */
|
||||||
|
#define HAVE_OPENSSL_EC_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
|
||||||
|
#define HAVE_OPENSSL_ECDSA_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <pthread.h> header file. */
|
||||||
|
#define HAVE_PTHREAD_H 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have eliptic curve cryptography in openssl */
|
||||||
|
#define HAVE_OPENSSL_ECC 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have eliptic curve cryptography in gcrypt */
|
||||||
|
/* #undef HAVE_GCRYPT_ECC */
|
||||||
|
|
||||||
|
/* Define to 1 if you have eliptic curve cryptography */
|
||||||
|
#define HAVE_ECC 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have DSA */
|
||||||
|
/* #undef HAVE_DSA */
|
||||||
|
|
||||||
|
/* Define to 1 if you have gl_flags as a glob_t struct member */
|
||||||
|
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have OpenSSL with Ed25519 support */
|
||||||
|
#define HAVE_OPENSSL_ED25519 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have OpenSSL with X25519 support */
|
||||||
|
#define HAVE_OPENSSL_X25519 1
|
||||||
|
|
||||||
|
/*************************** FUNCTIONS ***************************/
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
|
||||||
|
#define HAVE_OPENSSL_EVP_AES_CTR 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
|
||||||
|
#define HAVE_OPENSSL_EVP_AES_CBC 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
|
||||||
|
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
|
||||||
|
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
|
||||||
|
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
|
||||||
|
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
|
||||||
|
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `FIPS_mode' function. */
|
||||||
|
#if USE_BORINGSSL
|
||||||
|
#define HAVE_OPENSSL_FIPS_MODE 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `EVP_DigestSign' function. */
|
||||||
|
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `EVP_DigestVerify' function. */
|
||||||
|
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
|
||||||
|
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `snprintf' function. */
|
||||||
|
#define HAVE_SNPRINTF 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `_snprintf' function. */
|
||||||
|
/* #undef HAVE__SNPRINTF */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `_snprintf_s' function. */
|
||||||
|
/* #undef HAVE__SNPRINTF_S */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `vsnprintf' function. */
|
||||||
|
#define HAVE_VSNPRINTF 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `_vsnprintf' function. */
|
||||||
|
/* #undef HAVE__VSNPRINTF */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `_vsnprintf_s' function. */
|
||||||
|
/* #undef HAVE__VSNPRINTF_S */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `isblank' function. */
|
||||||
|
#define HAVE_ISBLANK 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `strncpy' function. */
|
||||||
|
#define HAVE_STRNCPY 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `strndup' function. */
|
||||||
|
#define HAVE_STRNDUP 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `cfmakeraw' function. */
|
||||||
|
/* #undef HAVE_CFMAKERAW */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `getaddrinfo' function. */
|
||||||
|
#define HAVE_GETADDRINFO 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `poll' function. */
|
||||||
|
#define HAVE_POLL 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `select' function. */
|
||||||
|
#define HAVE_SELECT 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `clock_gettime' function. */
|
||||||
|
/* #undef HAVE_CLOCK_GETTIME */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `ntohll' function. */
|
||||||
|
/* #undef HAVE_NTOHLL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `htonll' function. */
|
||||||
|
/* #undef HAVE_HTONLL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `strtoull' function. */
|
||||||
|
#define HAVE_STRTOULL 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `__strtoull' function. */
|
||||||
|
/* #undef HAVE___STRTOULL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `_strtoui64' function. */
|
||||||
|
/* #undef HAVE__STRTOUI64 */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `glob' function. */
|
||||||
|
#define HAVE_GLOB 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `explicit_bzero' function. */
|
||||||
|
#define HAVE_EXPLICIT_BZERO 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `memset_s' function. */
|
||||||
|
#define HAVE_MEMSET_S 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `SecureZeroMemory' function. */
|
||||||
|
/* #undef HAVE_SECURE_ZERO_MEMORY */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
|
||||||
|
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
|
||||||
|
|
||||||
|
/*************************** LIBRARIES ***************************/
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `crypto' library (-lcrypto). */
|
||||||
|
#define HAVE_LIBCRYPTO 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
|
||||||
|
/* #undef HAVE_LIBGCRYPT */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
|
||||||
|
/* #undef HAVE_LIBMBEDCRYPTO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `pthread' library (-lpthread). */
|
||||||
|
#define HAVE_PTHREAD 1
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
|
||||||
|
/* #undef HAVE_CMOCKA */
|
||||||
|
|
||||||
|
/**************************** OPTIONS ****************************/
|
||||||
|
|
||||||
|
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
|
||||||
|
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
|
||||||
|
|
||||||
|
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
|
||||||
|
#define HAVE_UNUSED_ATTRIBUTE 1
|
||||||
|
|
||||||
|
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
|
||||||
|
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
|
||||||
|
|
||||||
|
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
|
||||||
|
|
||||||
|
#define HAVE_COMPILER__FUNC__ 1
|
||||||
|
#define HAVE_COMPILER__FUNCTION__ 1
|
||||||
|
|
||||||
|
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable GSSAPI */
|
||||||
|
/* #undef WITH_GSSAPI */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable ZLIB */
|
||||||
|
/* #undef WITH_ZLIB */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable SFTP */
|
||||||
|
/* #undef WITH_SFTP */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable server support */
|
||||||
|
#define WITH_SERVER 1
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable DH group exchange algorithms */
|
||||||
|
/* #undef WITH_GEX */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable blowfish cipher support */
|
||||||
|
/* #undef WITH_BLOWFISH_CIPHER */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable debug output for crypto functions */
|
||||||
|
/* #undef DEBUG_CRYPTO */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable debug output for packet functions */
|
||||||
|
/* #undef DEBUG_PACKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable pcap output support (experimental) */
|
||||||
|
/* #undef WITH_PCAP */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable calltrace debug output */
|
||||||
|
/* #undef DEBUG_CALLTRACE */
|
||||||
|
|
||||||
|
/* Define to 1 if you want to enable NaCl support */
|
||||||
|
/* #undef WITH_NACL */
|
||||||
|
|
||||||
|
/*************************** ENDIAN *****************************/
|
||||||
|
|
||||||
|
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
|
||||||
|
significant byte first (like Motorola and SPARC, unlike Intel). */
|
||||||
|
/* #undef WORDS_BIGENDIAN */
|
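The block above is the generated feature-detection header for the bundled libssh build (CMake fills it in from a config.h.cmake template; the exact file name is an assumption here, since the extraction lost the diff's file headers). Such HAVE_*/WITH_* macros are consumed by the library source through ordinary preprocessor guards. A minimal sketch of that pattern, in C++ (illustrative only, not code from this commit; the function name and fallback chain are invented for the example):

    #include "config.h"

    #include <cstddef>
    #include <cstring>

    #ifdef HAVE_GLOB_H
    #    include <glob.h>
    #endif

    // Wipe a buffer using the best primitive the configure step detected.
    void secure_wipe(void * buf, std::size_t len)
    {
    #if defined(HAVE_EXPLICIT_BZERO)
        explicit_bzero(buf, len);            // glibc / BSD
    #elif defined(HAVE_MEMSET_S)
        memset_s(buf, len, 0, len);          // C11 Annex K
    #else
        auto * p = static_cast<volatile unsigned char *>(buf);
        while (len-- != 0)
            *p++ = 0;                        // portable fallback
    #endif
    }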
@ -1,4 +1,4 @@
-if (APPLE OR SANITIZE STREQUAL "undefined")
+if (APPLE OR SANITIZE STREQUAL "undefined" OR SANITIZE STREQUAL "memory")
     set (ENABLE_EMBEDDED_COMPILER_DEFAULT OFF)
 else()
     set (ENABLE_EMBEDDED_COMPILER_DEFAULT ON)
@ -54,8 +54,10 @@ namespace pdqsort_detail {
         block_size = 64,

         // Cacheline size, assumes power of two.
-        cacheline_size = 64
+        cacheline_size = 64,
+
+        /// Try sort allowed iterations
+        try_sort_iterations = 3,
     };

 #if __cplusplus >= 201103L
@ -501,6 +503,167 @@ namespace pdqsort_detail {
                 leftmost = false;
             }
         }
+
+    template<class Iter, class Compare, bool Branchless>
+    inline bool pdqsort_try_sort_loop(Iter begin,
+                                      Iter end,
+                                      Compare comp,
+                                      size_t bad_allowed,
+                                      size_t iterations_allowed,
+                                      bool force_sort = false,
+                                      bool leftmost = true) {
+        typedef typename std::iterator_traits<Iter>::difference_type diff_t;
+
+        // Use a while loop for tail recursion elimination.
+        while (true) {
+            if (!force_sort && iterations_allowed == 0) {
+                return false;
+            }
+
+            diff_t size = end - begin;
+
+            // Insertion sort is faster for small arrays.
+            if (size < insertion_sort_threshold) {
+                if (leftmost) insertion_sort(begin, end, comp);
+                else unguarded_insertion_sort(begin, end, comp);
+
+                return true;
+            }
+
+            // Choose pivot as median of 3 or pseudomedian of 9.
+            diff_t s2 = size / 2;
+            if (size > ninther_threshold) {
+                sort3(begin, begin + s2, end - 1, comp);
+                sort3(begin + 1, begin + (s2 - 1), end - 2, comp);
+                sort3(begin + 2, begin + (s2 + 1), end - 3, comp);
+                sort3(begin + (s2 - 1), begin + s2, begin + (s2 + 1), comp);
+                std::iter_swap(begin, begin + s2);
+            } else sort3(begin + s2, begin, end - 1, comp);
+
+            // If *(begin - 1) is the end of the right partition of a previous partition operation
+            // there is no element in [begin, end) that is smaller than *(begin - 1). Then if our
+            // pivot compares equal to *(begin - 1) we change strategy, putting equal elements in
+            // the left partition, greater elements in the right partition. We do not have to
+            // recurse on the left partition, since it's sorted (all equal).
+            if (!leftmost && !comp(*(begin - 1), *begin)) {
+                begin = partition_left(begin, end, comp) + 1;
+                continue;
+            }
+
+            // Partition and get results.
+            std::pair<Iter, bool> part_result =
+                Branchless ? partition_right_branchless(begin, end, comp)
+                           : partition_right(begin, end, comp);
+            Iter pivot_pos = part_result.first;
+            bool already_partitioned = part_result.second;
+
+            // Check for a highly unbalanced partition.
+            diff_t l_size = pivot_pos - begin;
+            diff_t r_size = end - (pivot_pos + 1);
+            bool highly_unbalanced = l_size < size / 8 || r_size < size / 8;
+
+            // If we got a highly unbalanced partition we shuffle elements to break many patterns.
+            if (highly_unbalanced) {
+                if (!force_sort) {
+                    return false;
+                }
+
+                // If we had too many bad partitions, switch to heapsort to guarantee O(n log n).
+                if (--bad_allowed == 0) {
+                    std::make_heap(begin, end, comp);
+                    std::sort_heap(begin, end, comp);
+                    return true;
+                }
+
+                if (l_size >= insertion_sort_threshold) {
+                    std::iter_swap(begin, begin + l_size / 4);
+                    std::iter_swap(pivot_pos - 1, pivot_pos - l_size / 4);
+
+                    if (l_size > ninther_threshold) {
+                        std::iter_swap(begin + 1, begin + (l_size / 4 + 1));
+                        std::iter_swap(begin + 2, begin + (l_size / 4 + 2));
+                        std::iter_swap(pivot_pos - 2, pivot_pos - (l_size / 4 + 1));
+                        std::iter_swap(pivot_pos - 3, pivot_pos - (l_size / 4 + 2));
+                    }
+                }
+
+                if (r_size >= insertion_sort_threshold) {
+                    std::iter_swap(pivot_pos + 1, pivot_pos + (1 + r_size / 4));
+                    std::iter_swap(end - 1, end - r_size / 4);
+
+                    if (r_size > ninther_threshold) {
+                        std::iter_swap(pivot_pos + 2, pivot_pos + (2 + r_size / 4));
+                        std::iter_swap(pivot_pos + 3, pivot_pos + (3 + r_size / 4));
+                        std::iter_swap(end - 2, end - (1 + r_size / 4));
+                        std::iter_swap(end - 3, end - (2 + r_size / 4));
+                    }
+                }
+            } else {
+                // If we were decently balanced and we tried to sort an already partitioned
+                // sequence try to use insertion sort.
+                if (already_partitioned && partial_insertion_sort(begin, pivot_pos, comp)
+                                        && partial_insertion_sort(pivot_pos + 1, end, comp)) {
+                    return true;
+                }
+            }
+
+            // Sort the left partition first using recursion and do tail recursion elimination for
+            // the right-hand partition.
+            if (pdqsort_try_sort_loop<Iter, Compare, Branchless>(begin,
+                                                                 pivot_pos,
+                                                                 comp,
+                                                                 bad_allowed,
+                                                                 iterations_allowed - 1,
+                                                                 force_sort,
+                                                                 leftmost)) {
+                force_sort = true;
+            } else {
+                return false;
+            }
+
+            --iterations_allowed;
+            begin = pivot_pos + 1;
+            leftmost = false;
+        }
+
+        return false;
+    }
+
+    template<class Iter, class Compare, bool Branchless>
+    inline bool pdqsort_try_sort_impl(Iter begin, Iter end, Compare comp, size_t bad_allowed)
+    {
+        typedef typename std::iterator_traits<Iter>::difference_type diff_t;
+
+        static constexpr size_t iterations_allowed = pdqsort_detail::try_sort_iterations;
+        static constexpr size_t num_to_try = 16;
+
+        diff_t size = end - begin;
+
+        if (size > num_to_try * 10)
+        {
+            size_t out_of_order_elements = 0;
+
+            for (size_t i = 1; i < num_to_try; ++i)
+            {
+                diff_t offset = size / num_to_try;
+
+                diff_t prev_position = offset * (i - 1);
+                diff_t curr_position = offset * i;
+                diff_t next_position = offset * (i + 1) - 1;
+
+                bool prev_less_than_curr = comp(*(begin + prev_position), *(begin + curr_position));
+                bool curr_less_than_next = comp(*(begin + curr_position), *(begin + next_position));
+                if ((prev_less_than_curr && curr_less_than_next) || (!prev_less_than_curr && !curr_less_than_next))
+                    continue;
+
+                ++out_of_order_elements;
+                if (out_of_order_elements > iterations_allowed)
+                    return false;
+            }
+        }
+
+        return pdqsort_try_sort_loop<Iter, Compare, Branchless>(begin, end, comp, bad_allowed, iterations_allowed);
+    }
 }
@ -538,6 +701,41 @@ inline void pdqsort_branchless(Iter begin, Iter end) {
     pdqsort_branchless(begin, end, std::less<T>());
 }
+
+template<class Iter, class Compare>
+inline bool pdqsort_try_sort(Iter begin, Iter end, Compare comp) {
+    if (begin == end) return true;
+
+#if __cplusplus >= 201103L
+    return pdqsort_detail::pdqsort_try_sort_impl<Iter, Compare,
+        pdqsort_detail::is_default_compare<typename std::decay<Compare>::type>::value &&
+        std::is_arithmetic<typename std::iterator_traits<Iter>::value_type>::value>(
+        begin, end, comp, pdqsort_detail::log2(end - begin));
+#else
+    return pdqsort_detail::pdqsort_try_sort_impl<Iter, Compare, false>(
+        begin, end, comp, pdqsort_detail::log2(end - begin));
+#endif
+}
+
+template<class Iter>
+inline bool pdqsort_try_sort(Iter begin, Iter end) {
+    typedef typename std::iterator_traits<Iter>::value_type T;
+    return pdqsort_try_sort(begin, end, std::less<T>());
+}
+
+template<class Iter, class Compare>
+inline bool pdqsort_try_sort_branchless(Iter begin, Iter end, Compare comp) {
+    if (begin == end) return true;
+
+    return pdqsort_detail::pdqsort_try_sort_impl<Iter, Compare, true>(
+        begin, end, comp, pdqsort_detail::log2(end - begin));
+}
+
+template<class Iter>
+inline bool pdqsort_try_sort_branchless(Iter begin, Iter end) {
+    typedef typename std::iterator_traits<Iter>::value_type T;
+    return pdqsort_try_sort_branchless(begin, end, std::less<T>());
+}

 #undef PDQSORT_PREFER_MOVE
|
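A note on the new entry points above: unlike pdqsort(), the pdqsort_try_sort* functions return a bool. For sufficiently large inputs, pdqsort_try_sort_impl first samples a few evenly spaced positions (num_to_try) to check whether the range already looks ordered, and pdqsort_try_sort_loop then runs with a budget of try_sort_iterations and bails out on a highly unbalanced partition unless force_sort is set. On failure the function returns false and the range may be partially reordered but is not guaranteed to be sorted, so the caller must fall back to a full sort. A minimal usage sketch (illustrative only; the sample data and the std::sort fallback are assumptions, not code from this commit):

    #include <algorithm>
    #include <vector>
    #include "pdqsort.h"

    int main()
    {
        std::vector<int> data = {5, 3, 8, 1, 9, 2, 7, 4, 6, 0};

        // Cheap opportunistic attempt: true means the range is now sorted,
        // false means the attempt was abandoned and we must sort for real.
        if (!pdqsort_try_sort_branchless(data.begin(), data.end()))
            std::sort(data.begin(), data.end());

        return 0;
    }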
@ -1,9 +1,7 @@
 {
     "docker/packager/binary": {
         "name": "clickhouse/binary-builder",
-        "dependent": [
-            "docker/test/codebrowser"
-        ]
+        "dependent": []
     },
     "docker/test/compatibility/centos": {
         "name": "clickhouse/test-old-centos",
@ -63,10 +61,6 @@
         "name": "clickhouse/upgrade-check",
         "dependent": []
     },
-    "docker/test/codebrowser": {
-        "name": "clickhouse/codebrowser",
-        "dependent": []
-    },
     "docker/test/integration/runner": {
         "only_amd64": true,
         "name": "clickhouse/integration-tests-runner",
|
||||||
|
@ -32,7 +32,7 @@ RUN arch=${TARGETARCH:-amd64} \
|
|||||||
esac
|
esac
|
||||||
|
|
||||||
ARG REPOSITORY="https://s3.amazonaws.com/clickhouse-builds/22.4/31c367d3cd3aefd316778601ff6565119fe36682/package_release"
|
ARG REPOSITORY="https://s3.amazonaws.com/clickhouse-builds/22.4/31c367d3cd3aefd316778601ff6565119fe36682/package_release"
|
||||||
ARG VERSION="23.8.2.7"
|
ARG VERSION="23.9.1.1854"
|
||||||
ARG PACKAGES="clickhouse-keeper"
|
ARG PACKAGES="clickhouse-keeper"
|
||||||
|
|
||||||
# user/group precreated explicitly with fixed uid/gid on purpose.
|
# user/group precreated explicitly with fixed uid/gid on purpose.
|
||||||
|
@ -15,6 +15,11 @@ if [ "$EXTRACT_TOOLCHAIN_DARWIN" = "1" ]; then
|
|||||||
mkdir -p /build/cmake/toolchain/darwin-x86_64
|
mkdir -p /build/cmake/toolchain/darwin-x86_64
|
||||||
tar xJf /MacOSX11.0.sdk.tar.xz -C /build/cmake/toolchain/darwin-x86_64 --strip-components=1
|
tar xJf /MacOSX11.0.sdk.tar.xz -C /build/cmake/toolchain/darwin-x86_64 --strip-components=1
|
||||||
ln -sf darwin-x86_64 /build/cmake/toolchain/darwin-aarch64
|
ln -sf darwin-x86_64 /build/cmake/toolchain/darwin-aarch64
|
||||||
|
|
||||||
|
if [ "$EXPORT_SOURCES_WITH_SUBMODULES" = "1" ]; then
|
||||||
|
cd /build
|
||||||
|
tar --exclude-vcs-ignores --exclude-vcs --exclude build --exclude build_docker --exclude debian --exclude .git --exclude .github --exclude .cache --exclude docs --exclude tests/integration -c . | pigz -9 > /output/source_sub.tar.gz
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Uncomment to debug ccache. Don't put ccache log in /output right away, or it
|
# Uncomment to debug ccache. Don't put ccache log in /output right away, or it
|
||||||
@ -26,9 +31,6 @@ fi
|
|||||||
mkdir -p /build/build_docker
|
mkdir -p /build/build_docker
|
||||||
cd /build/build_docker
|
cd /build/build_docker
|
||||||
rm -f CMakeCache.txt
|
rm -f CMakeCache.txt
|
||||||
# Read cmake arguments into array (possibly empty)
|
|
||||||
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
|
|
||||||
env
|
|
||||||
|
|
||||||
if [ -n "$MAKE_DEB" ]; then
|
if [ -n "$MAKE_DEB" ]; then
|
||||||
rm -rf /build/packages/root
|
rm -rf /build/packages/root
|
||||||
@ -55,11 +57,36 @@ ccache_status
|
|||||||
# clear cache stats
|
# clear cache stats
|
||||||
ccache --zero-stats ||:
|
ccache --zero-stats ||:
|
||||||
|
|
||||||
|
function check_prebuild_exists() {
|
||||||
|
local path="$1"
|
||||||
|
[ -d "$path" ] && [ "$(ls -A "$path")" ]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check whether the directory with pre-build scripts exists and not empty.
|
||||||
|
if check_prebuild_exists /build/packages/pre-build
|
||||||
|
then
|
||||||
|
# Execute all commands
|
||||||
|
for file in /build/packages/pre-build/*.sh ;
|
||||||
|
do
|
||||||
|
# The script may want to modify environment variables. Why not to allow it to do so?
|
||||||
|
# shellcheck disable=SC1090
|
||||||
|
source "$file"
|
||||||
|
done
|
||||||
|
else
|
||||||
|
echo "There are no subcommands to execute :)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Read cmake arguments into array (possibly empty)
|
||||||
|
# The name of local variable has to be different from the name of environment variable
|
||||||
|
# not to override it. And make it usable for other processes.
|
||||||
|
read -ra CMAKE_FLAGS_ARRAY <<< "${CMAKE_FLAGS:-}"
|
||||||
|
env
|
||||||
|
|
||||||
if [ "$BUILD_MUSL_KEEPER" == "1" ]
|
if [ "$BUILD_MUSL_KEEPER" == "1" ]
|
||||||
then
|
then
|
||||||
# build keeper with musl separately
|
# build keeper with musl separately
|
||||||
# and without rust bindings
|
# and without rust bindings
|
||||||
cmake --debug-trycompile -DENABLE_RUST=OFF -DBUILD_STANDALONE_KEEPER=1 -DENABLE_CLICKHOUSE_KEEPER=1 -DCMAKE_VERBOSE_MAKEFILE=1 -DUSE_MUSL=1 -LA -DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-x86_64-musl.cmake "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
|
cmake --debug-trycompile -DENABLE_RUST=OFF -DBUILD_STANDALONE_KEEPER=1 -DENABLE_CLICKHOUSE_KEEPER=1 -DCMAKE_VERBOSE_MAKEFILE=1 -DUSE_MUSL=1 -LA -DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-x86_64-musl.cmake "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS_ARRAY[@]}" ..
|
||||||
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
|
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
|
||||||
ninja $NINJA_FLAGS clickhouse-keeper
|
ninja $NINJA_FLAGS clickhouse-keeper
|
||||||
|
|
||||||
@ -73,13 +100,13 @@ then
|
|||||||
fi
|
fi
|
||||||
rm -f CMakeCache.txt
|
rm -f CMakeCache.txt
|
||||||
|
|
||||||
# Build the rest of binaries
|
# Modify CMake flags, so we won't overwrite standalone keeper with symlinks
|
||||||
cmake --debug-trycompile -DBUILD_STANDALONE_KEEPER=0 -DCREATE_KEEPER_SYMLINK=0 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
|
CMAKE_FLAGS_ARRAY+=(-DBUILD_STANDALONE_KEEPER=0 -DCREATE_KEEPER_SYMLINK=0)
|
||||||
else
|
|
||||||
# Build everything
|
|
||||||
cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Build everything
|
||||||
|
cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS_ARRAY[@]}" ..
|
||||||
|
|
||||||
# No quotes because I want it to expand to nothing if empty.
|
# No quotes because I want it to expand to nothing if empty.
|
||||||
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
|
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
|
||||||
ninja $NINJA_FLAGS $BUILD_TARGET
|
ninja $NINJA_FLAGS $BUILD_TARGET
|
||||||
|
@ -105,7 +105,7 @@ def run_docker_image_with_env(
         ccache_mount = ""

     cmd = (
-        f"docker run --network=host --user={user} --rm {ccache_mount}"
+        f"docker run --network=host --user={user} --rm {ccache_mount} "
         f"--volume={output_dir}:/output --volume={ch_root}:/build {env_part} "
         f"--volume={cargo_cache_dir}:/rust/cargo/registry {interactive} {image_name}"
     )
|
||||||
@ -179,6 +179,7 @@ def parse_env_variables(
|
|||||||
"-DCMAKE_TOOLCHAIN_FILE=/build/cmake/darwin/toolchain-x86_64.cmake"
|
"-DCMAKE_TOOLCHAIN_FILE=/build/cmake/darwin/toolchain-x86_64.cmake"
|
||||||
)
|
)
|
||||||
result.append("EXTRACT_TOOLCHAIN_DARWIN=1")
|
result.append("EXTRACT_TOOLCHAIN_DARWIN=1")
|
||||||
|
result.append("EXPORT_SOURCES_WITH_SUBMODULES=1")
|
||||||
elif is_cross_darwin_arm:
|
elif is_cross_darwin_arm:
|
||||||
cc = compiler[: -len(DARWIN_ARM_SUFFIX)]
|
cc = compiler[: -len(DARWIN_ARM_SUFFIX)]
|
||||||
cmake_flags.append("-DCMAKE_AR:FILEPATH=/cctools/bin/aarch64-apple-darwin-ar")
|
cmake_flags.append("-DCMAKE_AR:FILEPATH=/cctools/bin/aarch64-apple-darwin-ar")
|
||||||
@ -393,18 +394,18 @@ def parse_args() -> argparse.Namespace:
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--compiler",
|
"--compiler",
|
||||||
choices=(
|
choices=(
|
||||||
"clang-16",
|
"clang-17",
|
||||||
"clang-16-darwin",
|
"clang-17-darwin",
|
||||||
"clang-16-darwin-aarch64",
|
"clang-17-darwin-aarch64",
|
||||||
"clang-16-aarch64",
|
"clang-17-aarch64",
|
||||||
"clang-16-aarch64-v80compat",
|
"clang-17-aarch64-v80compat",
|
||||||
"clang-16-ppc64le",
|
"clang-17-ppc64le",
|
||||||
"clang-16-riscv64",
|
"clang-17-riscv64",
|
||||||
"clang-16-s390x",
|
"clang-17-s390x",
|
||||||
"clang-16-amd64-compat",
|
"clang-17-amd64-compat",
|
||||||
"clang-16-freebsd",
|
"clang-17-freebsd",
|
||||||
),
|
),
|
||||||
default="clang-16",
|
default="clang-17",
|
||||||
help="a compiler to use",
|
help="a compiler to use",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
|
@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
|
|||||||
# lts / testing / prestable / etc
|
# lts / testing / prestable / etc
|
||||||
ARG REPO_CHANNEL="stable"
|
ARG REPO_CHANNEL="stable"
|
||||||
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
|
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
|
||||||
ARG VERSION="23.8.2.7"
|
ARG VERSION="23.9.1.1854"
|
||||||
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
||||||
|
|
||||||
# user/group precreated explicitly with fixed uid/gid on purpose.
|
# user/group precreated explicitly with fixed uid/gid on purpose.
|
||||||
|
@ -23,7 +23,7 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
|
|||||||
|
|
||||||
ARG REPO_CHANNEL="stable"
|
ARG REPO_CHANNEL="stable"
|
||||||
ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
|
ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
|
||||||
ARG VERSION="23.8.2.7"
|
ARG VERSION="23.9.1.1854"
|
||||||
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
||||||
|
|
||||||
# set non-empty deb_location_url url to create a docker image
|
# set non-empty deb_location_url url to create a docker image
|
||||||
|
@ -1,30 +0,0 @@
|
|||||||
# rebuild in #33610
|
|
||||||
# docker build --network=host -t clickhouse/codebrowser .
|
|
||||||
# docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output clickhouse/codebrowser
|
|
||||||
ARG FROM_TAG=latest
|
|
||||||
FROM clickhouse/binary-builder:$FROM_TAG
|
|
||||||
|
|
||||||
# ARG for quick switch to a given ubuntu mirror
|
|
||||||
ARG apt_archive="http://archive.ubuntu.com"
|
|
||||||
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
|
|
||||||
|
|
||||||
RUN apt-get update && apt-get --yes --allow-unauthenticated install libclang-${LLVM_VERSION}-dev libmlir-${LLVM_VERSION}-dev
|
|
||||||
|
|
||||||
ARG TARGETARCH
|
|
||||||
RUN arch=${TARGETARCH:-amd64} \
|
|
||||||
&& case $arch in \
|
|
||||||
amd64) rarch=x86_64 ;; \
|
|
||||||
arm64) rarch=aarch64 ;; \
|
|
||||||
*) exit 1 ;; \
|
|
||||||
esac
|
|
||||||
|
|
||||||
# repo versions doesn't work correctly with C++17
|
|
||||||
# also we push reports to s3, so we add index.html to subfolder urls
|
|
||||||
# https://github.com/ClickHouse/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b
|
|
||||||
RUN git clone --branch=master --depth=1 https://github.com/ClickHouse/woboq_codebrowser /woboq_codebrowser \
|
|
||||||
&& cd /woboq_codebrowser \
|
|
||||||
&& cmake . -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-${LLVM_VERSION} -DCMAKE_C_COMPILER=clang-${LLVM_VERSION} -DCLANG_BUILTIN_HEADERS_DIR=/usr/lib/llvm-${LLVM_VERSION}/lib/clang/${LLVM_VERSION}/include \
|
|
||||||
&& ninja
|
|
||||||
|
|
||||||
COPY build.sh /
|
|
||||||
CMD ["bash", "-c", "/build.sh 2>&1"]
|
|
@ -1,29 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -x -e
|
|
||||||
|
|
||||||
|
|
||||||
STATIC_DATA=${STATIC_DATA:-/woboq_codebrowser/data}
|
|
||||||
SOURCE_DIRECTORY=${SOURCE_DIRECTORY:-/build}
|
|
||||||
BUILD_DIRECTORY=${BUILD_DIRECTORY:-/workdir/build}
|
|
||||||
OUTPUT_DIRECTORY=${OUTPUT_DIRECTORY:-/workdir/output}
|
|
||||||
HTML_RESULT_DIRECTORY=${HTML_RESULT_DIRECTORY:-$OUTPUT_DIRECTORY/html_report}
|
|
||||||
SHA=${SHA:-nosha}
|
|
||||||
DATA=${DATA:-https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/data}
|
|
||||||
nproc=$(($(nproc) + 2)) # increase parallelism
|
|
||||||
|
|
||||||
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
|
|
||||||
|
|
||||||
mkdir -p "$BUILD_DIRECTORY" && cd "$BUILD_DIRECTORY"
|
|
||||||
cmake "$SOURCE_DIRECTORY" -DCMAKE_CXX_COMPILER="/usr/bin/clang++-${LLVM_VERSION}" -DCMAKE_C_COMPILER="/usr/bin/clang-${LLVM_VERSION}" -DENABLE_WOBOQ_CODEBROWSER=ON "${CMAKE_FLAGS[@]}"
|
|
||||||
mkdir -p "$HTML_RESULT_DIRECTORY"
|
|
||||||
echo 'Filter out too noisy "Error: filename" lines and keep them in full codebrowser_generator.log'
|
|
||||||
/woboq_codebrowser/generator/codebrowser_generator -b "$BUILD_DIRECTORY" -a \
|
|
||||||
-o "$HTML_RESULT_DIRECTORY" --execute-concurrency="$nproc" -p "ClickHouse:$SOURCE_DIRECTORY:$SHA" \
|
|
||||||
-d "$DATA" \
|
|
||||||
|& ts '%Y-%m-%d %H:%M:%S' \
|
|
||||||
| tee "$OUTPUT_DIRECTORY/codebrowser_generator.log" \
|
|
||||||
| grep --line-buffered -v ':[0-9]* Error: '
|
|
||||||
cp -r "$STATIC_DATA" "$HTML_RESULT_DIRECTORY/"
|
|
||||||
/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator "$HTML_RESULT_DIRECTORY" \
|
|
||||||
-d "$DATA" |& ts '%Y-%m-%d %H:%M:%S'
|
|
@ -31,7 +31,11 @@ RUN mkdir -p /tmp/clickhouse-odbc-tmp \
|
|||||||
&& cp /tmp/clickhouse-odbc-tmp/lib64/*.so /usr/local/lib/ \
|
&& cp /tmp/clickhouse-odbc-tmp/lib64/*.so /usr/local/lib/ \
|
||||||
&& odbcinst -i -d -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbcinst.ini.sample \
|
&& odbcinst -i -d -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbcinst.ini.sample \
|
||||||
&& odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
|
&& odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
|
||||||
&& rm -rf /tmp/clickhouse-odbc-tmp
|
&& rm -rf /tmp/clickhouse-odbc-tmp \
|
||||||
|
&& mkdir -p /var/lib/clickhouse \
|
||||||
|
&& chmod 777 /var/lib/clickhouse
|
||||||
|
|
||||||
|
# chmod 777 to make the container user independent
|
||||||
|
|
||||||
ENV TZ=Europe/Amsterdam
|
ENV TZ=Europe/Amsterdam
|
||||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||||
|
@ -9,7 +9,7 @@ trap 'kill $(jobs -pr) ||:' EXIT
|
|||||||
stage=${stage:-}
|
stage=${stage:-}
|
||||||
|
|
||||||
# Compiler version, normally set by Dockerfile
|
# Compiler version, normally set by Dockerfile
|
||||||
export LLVM_VERSION=${LLVM_VERSION:-16}
|
export LLVM_VERSION=${LLVM_VERSION:-17}
|
||||||
|
|
||||||
# A variable to pass additional flags to CMake.
|
# A variable to pass additional flags to CMake.
|
||||||
# Here we explicitly default it to nothing so that bash doesn't complain about
|
# Here we explicitly default it to nothing so that bash doesn't complain about
|
||||||
@ -28,6 +28,12 @@ FASTTEST_BUILD=$(readlink -f "${FASTTEST_BUILD:-${BUILD:-$FASTTEST_WORKSPACE/bui
|
|||||||
FASTTEST_DATA=$(readlink -f "${FASTTEST_DATA:-$FASTTEST_WORKSPACE/db-fasttest}")
|
FASTTEST_DATA=$(readlink -f "${FASTTEST_DATA:-$FASTTEST_WORKSPACE/db-fasttest}")
|
||||||
FASTTEST_OUTPUT=$(readlink -f "${FASTTEST_OUTPUT:-$FASTTEST_WORKSPACE}")
|
FASTTEST_OUTPUT=$(readlink -f "${FASTTEST_OUTPUT:-$FASTTEST_WORKSPACE}")
|
||||||
PATH="$FASTTEST_BUILD/programs:$FASTTEST_SOURCE/tests:$PATH"
|
PATH="$FASTTEST_BUILD/programs:$FASTTEST_SOURCE/tests:$PATH"
|
||||||
|
# Work around for non-existent user
|
||||||
|
if [ "$HOME" == "/" ]; then
|
||||||
|
HOME="$FASTTEST_WORKSPACE/user-home"
|
||||||
|
mkdir -p "$HOME"
|
||||||
|
export HOME
|
||||||
|
fi
|
||||||
|
|
||||||
# Export these variables, so that all subsequent invocations of the script
|
# Export these variables, so that all subsequent invocations of the script
|
||||||
# use them, and not try to guess them anew, which leads to weird effects.
|
# use them, and not try to guess them anew, which leads to weird effects.
|
||||||
@ -152,7 +158,11 @@ function clone_submodules
|
|||||||
)
|
)
|
||||||
|
|
||||||
git submodule sync
|
git submodule sync
|
||||||
git submodule update --jobs=16 --depth 1 --single-branch --init "${SUBMODULES_TO_UPDATE[@]}"
|
git submodule init
|
||||||
|
# --jobs does not work as fast as real parallel running
|
||||||
|
printf '%s\0' "${SUBMODULES_TO_UPDATE[@]}" | \
|
||||||
|
xargs --max-procs=100 --null --no-run-if-empty --max-args=1 \
|
||||||
|
git submodule update --depth 1 --single-branch
|
||||||
git submodule foreach git reset --hard
|
git submodule foreach git reset --hard
|
||||||
git submodule foreach git checkout @ -f
|
git submodule foreach git checkout @ -f
|
||||||
git submodule foreach git clean -xfd
|
git submodule foreach git clean -xfd
|
||||||
@ -271,34 +281,12 @@ case "$stage" in
|
|||||||
;&
|
;&
|
||||||
"clone_root")
|
"clone_root")
|
||||||
clone_root
|
clone_root
|
||||||
|
|
||||||
# Pass control to the script from cloned sources, unless asked otherwise.
|
|
||||||
if ! [ -v FASTTEST_LOCAL_SCRIPT ]
|
|
||||||
then
|
|
||||||
# 'run' stage is deprecated, used for compatibility with old scripts.
|
|
||||||
# Replace with 'clone_submodules' after Nov 1, 2020.
|
|
||||||
# cd and CLICKHOUSE_DIR are also a setup for old scripts, remove as well.
|
|
||||||
# In modern script we undo it by changing back into workspace dir right
|
|
||||||
# away, see below. Remove that as well.
|
|
||||||
cd "$FASTTEST_SOURCE"
|
|
||||||
CLICKHOUSE_DIR=$(pwd)
|
|
||||||
export CLICKHOUSE_DIR
|
|
||||||
stage=run "$FASTTEST_SOURCE/docker/test/fasttest/run.sh"
|
|
||||||
exit $?
|
|
||||||
fi
|
|
||||||
;&
|
|
||||||
"run")
|
|
||||||
# A deprecated stage that is called by old script and equivalent to everything
|
|
||||||
# after cloning root, starting with cloning submodules.
|
|
||||||
;&
|
;&
|
||||||
"clone_submodules")
|
"clone_submodules")
|
||||||
# Recover after being called from the old script that changes into source directory.
|
|
||||||
# See the compatibility hacks in `clone_root` stage above. Remove at the same time,
|
|
||||||
# after Nov 1, 2020.
|
|
||||||
cd "$FASTTEST_WORKSPACE"
|
|
||||||
clone_submodules 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/submodule_log.txt"
|
clone_submodules 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/submodule_log.txt"
|
||||||
;&
|
;&
|
||||||
"run_cmake")
|
"run_cmake")
|
||||||
|
cd "$FASTTEST_WORKSPACE"
|
||||||
run_cmake
|
run_cmake
|
||||||
;&
|
;&
|
||||||
"build")
|
"build")
|
||||||
|
@ -17,7 +17,7 @@ stage=${stage:-}
|
|||||||
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||||
echo "$script_dir"
|
echo "$script_dir"
|
||||||
repo_dir=ch
|
repo_dir=ch
|
||||||
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-16_debug_none_unsplitted_disable_False_binary"}
|
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-17_debug_none_unsplitted_disable_False_binary"}
|
||||||
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
||||||
|
|
||||||
function git_clone_with_retry
|
function git_clone_with_retry
|
||||||
|
@ -80,7 +80,6 @@ RUN python3 -m pip install --no-cache-dir \
|
|||||||
kafka-python \
|
kafka-python \
|
||||||
kazoo \
|
kazoo \
|
||||||
lz4 \
|
lz4 \
|
||||||
meilisearch==0.18.3 \
|
|
||||||
minio \
|
minio \
|
||||||
nats-py \
|
nats-py \
|
||||||
protobuf \
|
protobuf \
|
||||||
|
@ -0,0 +1,16 @@
+version: '2.3'
+services:
+  openldap:
+    image: bitnami/openldap:2.6.6
+    restart: always
+    environment:
+      LDAP_ROOT: dc=example,dc=org
+      LDAP_ADMIN_DN: cn=admin,dc=example,dc=org
+      LDAP_ADMIN_USERNAME: admin
+      LDAP_ADMIN_PASSWORD: clickhouse
+      LDAP_USER_DC: users
+      LDAP_USERS: janedoe,johndoe
+      LDAP_PASSWORDS: qwerty,qwertz
+      LDAP_PORT_NUMBER: ${LDAP_INTERNAL_PORT:-1389}
+    ports:
+      - ${LDAP_EXTERNAL_PORT:-1389}:${LDAP_INTERNAL_PORT:-1389}
|
@ -1,15 +0,0 @@
|
|||||||
version: '2.3'
|
|
||||||
services:
|
|
||||||
meili1:
|
|
||||||
image: getmeili/meilisearch:v0.27.0
|
|
||||||
restart: always
|
|
||||||
ports:
|
|
||||||
- ${MEILI_EXTERNAL_PORT:-7700}:${MEILI_INTERNAL_PORT:-7700}
|
|
||||||
|
|
||||||
meili_secure:
|
|
||||||
image: getmeili/meilisearch:v0.27.0
|
|
||||||
restart: always
|
|
||||||
ports:
|
|
||||||
- ${MEILI_SECURE_EXTERNAL_PORT:-7700}:${MEILI_SECURE_INTERNAL_PORT:-7700}
|
|
||||||
environment:
|
|
||||||
MEILI_MASTER_KEY: "password"
|
|
@ -2,9 +2,7 @@ version: '2.3'
|
|||||||
|
|
||||||
services:
|
services:
|
||||||
minio1:
|
minio1:
|
||||||
# Newer version of minio results in such errors:
|
image: minio/minio:RELEASE.2023-09-30T07-02-29Z
|
||||||
# "AWSErrorMarshaller: Encountered AWSError 'InternalError': We encountered an internal error, please try again"
|
|
||||||
image: minio/minio:RELEASE.2021-09-23T04-46-24Z
|
|
||||||
volumes:
|
volumes:
|
||||||
- data1-1:/data1
|
- data1-1:/data1
|
||||||
- ${MINIO_CERTS_DIR:-}:/certs
|
- ${MINIO_CERTS_DIR:-}:/certs
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
|
||||||
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-16_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-17_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
||||||
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
||||||
|
|
||||||
|
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
|
||||||
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-16_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-17_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
||||||
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
||||||
|
|
||||||
|
|
||||||
|
@ -6,7 +6,7 @@ set -e
|
|||||||
set -u
|
set -u
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
|
|
||||||
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-16_debug_none_unsplitted_disable_False_binary"}
|
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-17_debug_none_unsplitted_disable_False_binary"}
|
||||||
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
||||||
|
|
||||||
function wget_with_retry
|
function wget_with_retry
|
||||||
|
@ -69,6 +69,16 @@ else
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
|
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
|
||||||
|
sudo cat /etc/clickhouse-server1/config.d/filesystem_caches_path.xml \
|
||||||
|
| sed "s|<filesystem_caches_path>/var/lib/clickhouse/filesystem_caches/</filesystem_caches_path>|<filesystem_caches_path>/var/lib/clickhouse/filesystem_caches_1/</filesystem_caches_path>|" \
|
||||||
|
> /etc/clickhouse-server1/config.d/filesystem_caches_path.xml.tmp
|
||||||
|
mv /etc/clickhouse-server1/config.d/filesystem_caches_path.xml.tmp /etc/clickhouse-server1/config.d/filesystem_caches_path.xml
|
||||||
|
|
||||||
|
sudo cat /etc/clickhouse-server2/config.d/filesystem_caches_path.xml \
|
||||||
|
| sed "s|<filesystem_caches_path>/var/lib/clickhouse/filesystem_caches/</filesystem_caches_path>|<filesystem_caches_path>/var/lib/clickhouse/filesystem_caches_2/</filesystem_caches_path>|" \
|
||||||
|
> /etc/clickhouse-server2/config.d/filesystem_caches_path.xml.tmp
|
||||||
|
mv /etc/clickhouse-server2/config.d/filesystem_caches_path.xml.tmp /etc/clickhouse-server2/config.d/filesystem_caches_path.xml
|
||||||
|
|
||||||
mkdir -p /var/run/clickhouse-server1
|
mkdir -p /var/run/clickhouse-server1
|
||||||
sudo chown clickhouse:clickhouse /var/run/clickhouse-server1
|
sudo chown clickhouse:clickhouse /var/run/clickhouse-server1
|
||||||
sudo -E -u clickhouse /usr/bin/clickhouse server --config /etc/clickhouse-server1/config.xml --daemon \
|
sudo -E -u clickhouse /usr/bin/clickhouse server --config /etc/clickhouse-server1/config.xml --daemon \
|
||||||
|
@ -52,6 +52,21 @@ function configure()
         | sed "s|<snapshot_distance>100000</snapshot_distance>|<snapshot_distance>10000</snapshot_distance>|" \
         > /etc/clickhouse-server/config.d/keeper_port.xml.tmp
     sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
+
+    function randomize_config_boolean_value {
+        value=$(($RANDOM % 2))
+        sudo cat /etc/clickhouse-server/config.d/keeper_port.xml \
+            | sed "s|<$1>[01]</$1>|<$1>$value</$1>|" \
+            > /etc/clickhouse-server/config.d/keeper_port.xml.tmp
+        sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
+    }
+
+    # Randomize all Keeper feature flags
+    randomize_config_boolean_value filtered_list
+    randomize_config_boolean_value multi_read
+    randomize_config_boolean_value check_not_exists
+    randomize_config_boolean_value create_if_not_exists
+
     sudo chown clickhouse /etc/clickhouse-server/config.d/keeper_port.xml
     sudo chgrp clickhouse /etc/clickhouse-server/config.d/keeper_port.xml
|
||||||
|
|
||||||
|
@ -6,5 +6,4 @@ FROM clickhouse/stateless-test:$FROM_TAG
|
|||||||
RUN apt-get install gdb
|
RUN apt-get install gdb
|
||||||
|
|
||||||
COPY run.sh /
|
COPY run.sh /
|
||||||
COPY process_unit_tests_result.py /
|
|
||||||
CMD ["/bin/bash", "/run.sh"]
|
CMD ["/bin/bash", "/run.sh"]
|
||||||
|
@ -1,102 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import os
|
|
||||||
import logging
|
|
||||||
import argparse
|
|
||||||
import csv
|
|
||||||
|
|
||||||
OK_SIGN = "OK ]"
|
|
||||||
FAILED_SIGN = "FAILED ]"
|
|
||||||
SEGFAULT = "Segmentation fault"
|
|
||||||
SIGNAL = "received signal SIG"
|
|
||||||
PASSED = "PASSED"
|
|
||||||
|
|
||||||
|
|
||||||
def get_test_name(line):
|
|
||||||
elements = reversed(line.split(" "))
|
|
||||||
for element in elements:
|
|
||||||
if "(" not in element and ")" not in element:
|
|
||||||
return element
|
|
||||||
raise Exception("No test name in line '{}'".format(line))
|
|
||||||
|
|
||||||
|
|
||||||
def process_result(result_folder):
|
|
||||||
summary = []
|
|
||||||
total_counter = 0
|
|
||||||
failed_counter = 0
|
|
||||||
result_log_path = "{}/test_result.txt".format(result_folder)
|
|
||||||
if not os.path.exists(result_log_path):
|
|
||||||
logging.info("No output log on path %s", result_log_path)
|
|
||||||
return "exception", "No output log", []
|
|
||||||
|
|
||||||
status = "success"
|
|
||||||
description = ""
|
|
||||||
passed = False
|
|
||||||
with open(result_log_path, "r") as test_result:
|
|
||||||
for line in test_result:
|
|
||||||
if OK_SIGN in line:
|
|
||||||
logging.info("Found ok line: '%s'", line)
|
|
||||||
test_name = get_test_name(line.strip())
|
|
||||||
logging.info("Test name: '%s'", test_name)
|
|
||||||
summary.append((test_name, "OK"))
|
|
||||||
total_counter += 1
|
|
||||||
elif FAILED_SIGN in line and "listed below" not in line and "ms)" in line:
|
|
||||||
logging.info("Found fail line: '%s'", line)
|
|
||||||
test_name = get_test_name(line.strip())
|
|
||||||
logging.info("Test name: '%s'", test_name)
|
|
||||||
summary.append((test_name, "FAIL"))
|
|
||||||
total_counter += 1
|
|
||||||
failed_counter += 1
|
|
||||||
elif SEGFAULT in line:
|
|
||||||
logging.info("Found segfault line: '%s'", line)
|
|
||||||
status = "failure"
|
|
||||||
description += "Segmentation fault. "
|
|
||||||
break
|
|
||||||
elif SIGNAL in line:
|
|
||||||
logging.info("Received signal line: '%s'", line)
|
|
||||||
status = "failure"
|
|
||||||
description += "Exit on signal. "
|
|
||||||
break
|
|
||||||
elif PASSED in line:
|
|
||||||
logging.info("PASSED record found: '%s'", line)
|
|
||||||
passed = True
|
|
||||||
|
|
||||||
if not passed:
|
|
||||||
status = "failure"
|
|
||||||
description += "PASSED record not found. "
|
|
||||||
|
|
||||||
if failed_counter != 0:
|
|
||||||
status = "failure"
|
|
||||||
|
|
||||||
if not description:
|
|
||||||
description += "fail: {}, passed: {}".format(
|
|
||||||
failed_counter, total_counter - failed_counter
|
|
||||||
)
|
|
||||||
|
|
||||||
return status, description, summary
|
|
||||||
|
|
||||||
|
|
||||||
def write_results(results_file, status_file, results, status):
|
|
||||||
with open(results_file, "w") as f:
|
|
||||||
out = csv.writer(f, delimiter="\t")
|
|
||||||
out.writerows(results)
|
|
||||||
with open(status_file, "w") as f:
|
|
||||||
out = csv.writer(f, delimiter="\t")
|
|
||||||
out.writerow(status)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description="ClickHouse script for parsing results of unit tests"
|
|
||||||
)
|
|
||||||
parser.add_argument("--in-results-dir", default="/test_output/")
|
|
||||||
parser.add_argument("--out-results-file", default="/test_output/test_results.tsv")
|
|
||||||
parser.add_argument("--out-status-file", default="/test_output/check_status.tsv")
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
state, description, test_results = process_result(args.in_results_dir)
|
|
||||||
logging.info("Result parsed")
|
|
||||||
status = (state, description)
|
|
||||||
write_results(args.out_results_file, args.out_status_file, test_results, status)
|
|
||||||
logging.info("Result written")
|
|
@ -3,5 +3,4 @@
|
|||||||
set -x
|
set -x
|
||||||
|
|
||||||
service zookeeper start && sleep 7 && /usr/share/zookeeper/bin/zkCli.sh -server localhost:2181 -create create /clickhouse_test '';
|
service zookeeper start && sleep 7 && /usr/share/zookeeper/bin/zkCli.sh -server localhost:2181 -create create /clickhouse_test '';
|
||||||
timeout 40m gdb -q -ex 'set print inferior-events off' -ex 'set confirm off' -ex 'set print thread-events off' -ex run -ex bt -ex quit --args ./unit_tests_dbms | tee test_output/test_result.txt
|
timeout 40m gdb -q -ex 'set print inferior-events off' -ex 'set confirm off' -ex 'set print thread-events off' -ex run -ex bt -ex quit --args ./unit_tests_dbms --gtest_output='json:test_output/test_result.json' | tee test_output/test_result.txt
|
||||||
./process_unit_tests_result.py || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
|
|
||||||
|
@ -60,11 +60,19 @@ install_packages previous_release_package_folder
|
|||||||
# available for dump via clickhouse-local
|
# available for dump via clickhouse-local
|
||||||
configure
|
configure
|
||||||
|
|
||||||
|
function remove_keeper_config()
|
||||||
|
{
|
||||||
|
sudo cat /etc/clickhouse-server/config.d/keeper_port.xml \
|
||||||
|
| sed "/<$1>$2<\/$1>/d" \
|
||||||
|
> /etc/clickhouse-server/config.d/keeper_port.xml.tmp
|
||||||
|
sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
|
||||||
|
}
|
||||||
|
|
||||||
# async_replication setting doesn't exist on some older versions
|
# async_replication setting doesn't exist on some older versions
|
||||||
sudo cat /etc/clickhouse-server/config.d/keeper_port.xml \
|
remove_keeper_config "async_replication" "1"
|
||||||
| sed "/<async_replication>1<\/async_replication>/d" \
|
|
||||||
> /etc/clickhouse-server/config.d/keeper_port.xml.tmp
|
# create_if_not_exists feature flag doesn't exist on some older versions
|
||||||
sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
|
remove_keeper_config "create_if_not_exists" "[01]"
|
||||||
|
|
||||||
# it contains some new settings, but we can safely remove it
|
# it contains some new settings, but we can safely remove it
|
||||||
rm /etc/clickhouse-server/config.d/merge_tree.xml
|
rm /etc/clickhouse-server/config.d/merge_tree.xml
|
||||||
@ -89,10 +97,10 @@ sudo cat /etc/clickhouse-server/config.d/keeper_port.xml \
|
|||||||
sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
|
sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
|
||||||
|
|
||||||
# async_replication setting doesn't exist on some older versions
|
# async_replication setting doesn't exist on some older versions
|
||||||
sudo cat /etc/clickhouse-server/config.d/keeper_port.xml \
|
remove_keeper_config "async_replication" "1"
|
||||||
| sed "/<async_replication>1<\/async_replication>/d" \
|
|
||||||
> /etc/clickhouse-server/config.d/keeper_port.xml.tmp
|
# create_if_not_exists feature flag doesn't exist on some older versions
|
||||||
sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
|
remove_keeper_config "create_if_not_exists" "[01]"
|
||||||
|
|
||||||
# But we still need default disk because some tables loaded only into it
|
# But we still need default disk because some tables loaded only into it
|
||||||
sudo cat /etc/clickhouse-server/config.d/s3_storage_policy_by_default.xml \
|
sudo cat /etc/clickhouse-server/config.d/s3_storage_policy_by_default.xml \
|
||||||
|
@ -6,7 +6,7 @@ ARG apt_archive="http://archive.ubuntu.com"
|
|||||||
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
|
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
|
||||||
|
|
||||||
# 15.0.2
|
# 15.0.2
|
||||||
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=16
|
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=17
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install \
|
&& apt-get install \
|
||||||
|
43
docs/changelogs/v23.8.3.48-lts.md
Normal file
43
docs/changelogs/v23.8.3.48-lts.md
Normal file
@ -0,0 +1,43 @@
---
sidebar_position: 1
sidebar_label: 2023
---

# 2023 Changelog

### ClickHouse release v23.8.3.48-lts (ebe4eb3d23e) FIXME as compared to v23.8.2.7-lts (f73c8f37874)

#### Improvement
* Backported in [#54287](https://github.com/ClickHouse/ClickHouse/issues/54287): Enable allow_remove_stale_moving_parts by default. [#54260](https://github.com/ClickHouse/ClickHouse/pull/54260) ([vdimir](https://github.com/vdimir)).
* Backported in [#55057](https://github.com/ClickHouse/ClickHouse/issues/55057): ProfileEvents added ContextLockWaitMicroseconds event. [#55029](https://github.com/ClickHouse/ClickHouse/pull/55029) ([Maksim Kita](https://github.com/kitaisreal)).

#### Build/Testing/Packaging Improvement
* Backported in [#54702](https://github.com/ClickHouse/ClickHouse/issues/54702): Enrich `changed_images.json` with the latest tag from master for images that are not changed in the pull request. [#54369](https://github.com/ClickHouse/ClickHouse/pull/54369) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Backported in [#54743](https://github.com/ClickHouse/ClickHouse/issues/54743): Remove redundant `clickhouse-keeper-client` symlink. [#54587](https://github.com/ClickHouse/ClickHouse/pull/54587) ([Tomas Barton](https://github.com/deric)).
* Backported in [#54685](https://github.com/ClickHouse/ClickHouse/issues/54685): We build and upload them for every push, which isn't worth it. [#54675](https://github.com/ClickHouse/ClickHouse/pull/54675) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).

#### Bug Fix (user-visible misbehavior in an official stable release)

* Fix: moved to prewhere condition actions can lose column [#53492](https://github.com/ClickHouse/ClickHouse/pull/53492) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix: parallel replicas over distributed with prefer_localhost_replica=1 [#54334](https://github.com/ClickHouse/ClickHouse/pull/54334) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix possible error 'URI contains invalid characters' in s3 table function [#54373](https://github.com/ClickHouse/ClickHouse/pull/54373) ([Kruglov Pavel](https://github.com/Avogar)).
* Check for overflow before addition in `analysisOfVariance` function [#54385](https://github.com/ClickHouse/ClickHouse/pull/54385) ([Antonio Andelic](https://github.com/antonio2368)).
* reproduce and fix the bug in removeSharedRecursive [#54430](https://github.com/ClickHouse/ClickHouse/pull/54430) ([Sema Checherinda](https://github.com/CheSema)).
* Fix aggregate projections with normalized states [#54480](https://github.com/ClickHouse/ClickHouse/pull/54480) ([Amos Bird](https://github.com/amosbird)).
* Fix possible parsing error in WithNames formats with disabled input_format_with_names_use_header [#54513](https://github.com/ClickHouse/ClickHouse/pull/54513) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix zero copy garbage [#54550](https://github.com/ClickHouse/ClickHouse/pull/54550) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix race in `ColumnUnique` [#54575](https://github.com/ClickHouse/ClickHouse/pull/54575) ([Nikita Taranov](https://github.com/nickitat)).
* Fix serialization of `ColumnDecimal` [#54601](https://github.com/ClickHouse/ClickHouse/pull/54601) ([Nikita Taranov](https://github.com/nickitat)).
* Fix virtual columns having incorrect values after ORDER BY [#54811](https://github.com/ClickHouse/ClickHouse/pull/54811) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix Keeper segfault during shutdown [#54841](https://github.com/ClickHouse/ClickHouse/pull/54841) ([Antonio Andelic](https://github.com/antonio2368)).
* Rebuild minmax_count_projection when partition key gets modified [#54943](https://github.com/ClickHouse/ClickHouse/pull/54943) ([Amos Bird](https://github.com/amosbird)).

#### NOT FOR CHANGELOG / INSIGNIFICANT

* Test libunwind changes. [#51436](https://github.com/ClickHouse/ClickHouse/pull/51436) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Use pathlib.Path in S3Helper, rewrite build reports, improve small things [#54010](https://github.com/ClickHouse/ClickHouse/pull/54010) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Properly re-initialize ZooKeeper fault injection [#54251](https://github.com/ClickHouse/ClickHouse/pull/54251) ([Alexander Gololobov](https://github.com/davenger)).
* Fix segfault in system.zookeeper [#54326](https://github.com/ClickHouse/ClickHouse/pull/54326) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Update automated commit status comment [#54441](https://github.com/ClickHouse/ClickHouse/pull/54441) ([vdimir](https://github.com/vdimir)).
* Adjusting `num_streams` by expected work in StorageS3 [#54815](https://github.com/ClickHouse/ClickHouse/pull/54815) ([pufit](https://github.com/pufit)).

381 docs/changelogs/v23.9.1.1854-stable.md Normal file
@ -0,0 +1,381 @@
---
sidebar_position: 1
sidebar_label: 2023
---

# 2023 Changelog

### ClickHouse release v23.9.1.1854-stable (8f9a227de1f) FIXME as compared to v23.8.1.2992-lts (ebc7d9a9f3b)

#### Backward Incompatible Change
* Remove the `status_info` configuration option and dictionaries status from the default Prometheus handler. [#54090](https://github.com/ClickHouse/ClickHouse/pull/54090) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* The experimental parts metadata cache is removed from the codebase. [#54215](https://github.com/ClickHouse/ClickHouse/pull/54215) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Disable setting `input_format_json_try_infer_numbers_from_strings` by default, so we don't try to infer numbers from strings in JSON formats by default, to avoid possible parsing errors when sample data contains strings that look like a number (see the sketch after this list). [#55099](https://github.com/ClickHouse/ClickHouse/pull/55099) ([Kruglov Pavel](https://github.com/Avogar)).

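A minimal sketch of restoring the previous inference behavior per session; only the setting name comes from the entry above, the file name is a hypothetical example:

```sql
-- Opt back into number-from-string inference (the new default is 0).
SET input_format_json_try_infer_numbers_from_strings = 1;

-- Schema inference will again turn "42"-style strings into numeric columns.
DESCRIBE TABLE file('sample.json', 'JSONEachRow');
```
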
#### New Feature
* Added a new type of authentication based on SSH keys. It works only for the native TCP protocol. [#41109](https://github.com/ClickHouse/ClickHouse/pull/41109) ([George Gamezardashvili](https://github.com/InfJoker)).
* Added IO Scheduling support for remote disks. Storage configuration for disk types `s3`, `s3_plain`, `hdfs` and `azure_blob_storage` can now contain `read_resource` and `write_resource` elements holding resource names. Scheduling policies for these resources can be configured in a separate server configuration section `resources`. Queries can be marked using setting `workload` and classified using server configuration section `workload_classifiers` to achieve diverse resource scheduling goals. More details in docs/en/operations/workload-scheduling.md. [#47009](https://github.com/ClickHouse/ClickHouse/pull/47009) ([Sergei Trifonov](https://github.com/serxa)).
* Added a new column `_block_number`. Resolves [#44532](https://github.com/ClickHouse/ClickHouse/issues/44532). [#47532](https://github.com/ClickHouse/ClickHouse/pull/47532) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Add options `partial_result_update_duration_ms` and `max_rows_in_partial_result` to show updates of a partial result of output table in real-time during query execution. [#48607](https://github.com/ClickHouse/ClickHouse/pull/48607) ([Alexey Perevyshin](https://github.com/alexX512)).
* Support case-insensitive and dot-all matching modes in RegExpTree dictionaries. [#50906](https://github.com/ClickHouse/ClickHouse/pull/50906) ([Johann Gan](https://github.com/johanngan)).
* Add support for `ALTER TABLE MODIFY COMMENT`. Note: something similar was added by an external contributor a long time ago, but the feature did not work at all and only confused users. This closes [#36377](https://github.com/ClickHouse/ClickHouse/issues/36377). [#51304](https://github.com/ClickHouse/ClickHouse/pull/51304) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Added "GCD" aka. "greatest common denominator" as a new data compression codec. The codec computes the GCD of all column values, and then divides each value by the GCD. The GCD codec is a data preparation codec (similar to Delta and DoubleDelta) and cannot be used stand-alone. It works with integer, decimal and date/time data types. A viable use case for the GCD codec are column values that change (increase/decrease) in multiples of the GCD, e.g. 24 - 28 - 16 - 24 - 8 - 24 (assuming GCD = 4). [#53149](https://github.com/ClickHouse/ClickHouse/pull/53149) ([Alexander Nam](https://github.com/seshWCS)).
* Two new type aliases "DECIMAL(P)" (as a shortcut for "DECIMAL(P, 0)") and "DECIMAL" (as a shortcut for "DECIMAL(10, 0)") were added. This makes ClickHouse more compatible with MySQL's SQL dialect (see the sketch after this list). [#53328](https://github.com/ClickHouse/ClickHouse/pull/53328) ([Val Doroshchuk](https://github.com/valbok)).
* Added a new system log table `backup_log` to track all `BACKUP` and `RESTORE` operations. [#53638](https://github.com/ClickHouse/ClickHouse/pull/53638) ([Victor Krasnov](https://github.com/sirvickr)).
* Added a format setting "output_format_markdown_escape_special_characters" (default: false). The setting controls whether special characters like "!", "#", "$" etc. are escaped (i.e. prefixed by a backslash) in the "Markdown" output format. [#53860](https://github.com/ClickHouse/ClickHouse/pull/53860) ([irenjj](https://github.com/irenjj)).
* Add function `decodeHTMLComponent`. [#54097](https://github.com/ClickHouse/ClickHouse/pull/54097) ([Bharat Nallan](https://github.com/bharatnc)).
* Added peak_threads_usage to query_log table. [#54335](https://github.com/ClickHouse/ClickHouse/pull/54335) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
* Add SHOW FUNCTIONS support to clickhouse-client. [#54337](https://github.com/ClickHouse/ClickHouse/pull/54337) ([Julia Kartseva](https://github.com/wat-ze-hex)).
* This PR improves schema inference from JSON formats: 1) It's now possible to infer named Tuples from JSON objects without the experimental JSON type, under a setting `input_format_json_try_infer_named_tuples_from_objects` in JSON formats. Previously, without the experimental type JSON, we could only infer JSON objects as Strings or Maps; now we can infer named Tuples. The resulting Tuple type will contain all keys of objects that were read in the data sample during schema inference. It can be useful for reading structured JSON data without sparse objects. The setting is enabled by default. 2) Allow parsing JSON arrays into a column with type String under setting `input_format_json_read_arrays_as_strings`. It can help with reading arrays whose values have different types. 3) Allow using type String for JSON keys with unknown types (`null`/`[]`/`{}`) in sample data under setting `input_format_json_infer_incomplete_types_as_strings`. Now in JSON formats we can read any value into a String column, and we can avoid getting the error `Cannot determine type for column 'column_name' by first 25000 rows of data, most likely this column contains only Nulls or empty Arrays/Maps` during schema inference by using type String for unknown types, so the data will be read successfully. [#54427](https://github.com/ClickHouse/ClickHouse/pull/54427) ([Kruglov Pavel](https://github.com/Avogar)).
* Added function "toDaysSinceYearZero" with alias "TO_DAYS()" (for compatibility with MySQL) which returns the number of days passed since 0001-01-01. [#54479](https://github.com/ClickHouse/ClickHouse/pull/54479) ([Robert Schulze](https://github.com/rschu1ze)).
* Added functions YYYYMMDDtoDate(), YYYYMMDDtoDate32(), YYYYMMDDhhmmssToDateTime() and YYYYMMDDhhmmssToDateTime64(). They convert a date or date with time encoded as an integer (e.g. 20230911) into a native date or date with time. As such, they provide the opposite functionality of existing functions YYYYMMDDToDate(), YYYYMMDDToDateTime(), YYYYMMDDhhmmddToDateTime(), YYYYMMDDhhmmddToDateTime64(). [#54509](https://github.com/ClickHouse/ClickHouse/pull/54509) ([Robert Schulze](https://github.com/rschu1ze)).
* Added "bandwidth_limit" IO scheduling node type. It allows you to specify `max_speed` and `max_burst` constraints on traffic passing through this node. More details in docs/en/operations/workload-scheduling.md. [#54618](https://github.com/ClickHouse/ClickHouse/pull/54618) ([Sergei Trifonov](https://github.com/serxa)).
* Function `toDaysSinceYearZero()` now supports arguments of type `DateTime` and `DateTime64`. [#54856](https://github.com/ClickHouse/ClickHouse/pull/54856) ([Serge Klochkov](https://github.com/slvrtrn)).
* Allow S3-style URLs for table functions `s3`, `gcs`, `oss`. The URL is automatically converted to HTTP. Example: `'s3://clickhouse-public-datasets/hits.csv'` is converted to `'https://clickhouse-public-datasets.s3.amazonaws.com/hits.csv'`. [#54931](https://github.com/ClickHouse/ClickHouse/pull/54931) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add several string distance functions, including `byteHammingDistance`, `byteJaccardIndex`, `byteEditDistance`. [#54935](https://github.com/ClickHouse/ClickHouse/pull/54935) ([flynn](https://github.com/ucasfl)).
* Add new setting `print_pretty_type_names` to print pretty deep nested types like Tuple/Maps/Arrays. [#55095](https://github.com/ClickHouse/ClickHouse/pull/55095) ([Kruglov Pavel](https://github.com/Avogar)).

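A minimal sketch exercising a few of the features announced above (the MySQL-style DECIMAL aliases, the GCD codec, toDaysSinceYearZero/TO_DAYS and the byte-level string distance functions). Table and column names are illustrative only; exact signatures are in the linked PRs:

```sql
-- DECIMAL(6) is shorthand for Decimal(6, 0); bare DECIMAL is Decimal(10, 0).
-- GCD is a data-preparation codec, so it is chained with a general-purpose codec here.
CREATE TABLE prices
(
    id     UInt64,
    amount DECIMAL(6),
    total  DECIMAL,
    step   UInt32 CODEC(GCD, LZ4)
)
ENGINE = MergeTree
ORDER BY id;

SELECT toDaysSinceYearZero(toDate('2023-09-11')), TO_DAYS(toDate('2023-09-11'));
SELECT byteHammingDistance('clickhouse', 'clickhomse'), byteEditDistance('bitmap', 'bitmop');
```
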
#### Performance Improvement
* Improve performance of sorting for decimal columns. Improve performance of insertion into MergeTree if ORDER BY contains a Decimal column. Improve performance of sorting when data is already sorted or almost sorted. [#35961](https://github.com/ClickHouse/ClickHouse/pull/35961) ([Maksim Kita](https://github.com/kitaisreal)).
* Improve performance for huge query analysis. Fixes [#51224](https://github.com/ClickHouse/ClickHouse/issues/51224). [#51469](https://github.com/ClickHouse/ClickHouse/pull/51469) ([frinkr](https://github.com/frinkr)).
* Add rewriter for the new analyzer. [#52082](https://github.com/ClickHouse/ClickHouse/pull/52082) ([JackyWoo](https://github.com/JackyWoo)).
* Add rewriter for both the old and the new analyzer, and add the setting `optimize_uniq_to_count` (see the sketch after this list). [#52645](https://github.com/ClickHouse/ClickHouse/pull/52645) ([JackyWoo](https://github.com/JackyWoo)).
* Remove manual calls to `mmap/mremap/munmap` and delegate all this work to `jemalloc`. [#52792](https://github.com/ClickHouse/ClickHouse/pull/52792) ([Nikita Taranov](https://github.com/nickitat)).
* roaringBitmaps are now optimized before serialization. [#52842](https://github.com/ClickHouse/ClickHouse/pull/52842) ([UnamedRus](https://github.com/UnamedRus)).
* Optimize GROUP BY with constant keys. Will optimize queries with group by `_file/_path` after https://github.com/ClickHouse/ClickHouse/pull/53529. [#53549](https://github.com/ClickHouse/ClickHouse/pull/53549) ([Kruglov Pavel](https://github.com/Avogar)).
* Speed up reading from S3 by enabling prefetches by default. [#53709](https://github.com/ClickHouse/ClickHouse/pull/53709) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Do not implicitly read pk and version columns in lonely parts if unnecessary. [#53919](https://github.com/ClickHouse/ClickHouse/pull/53919) ([Duc Canh Le](https://github.com/canhld94)).
* Fixed high CPU consumption when working with NATS. [#54399](https://github.com/ClickHouse/ClickHouse/pull/54399) ([Vasilev Pyotr](https://github.com/vahpetr)).
* Since we use separate instructions for executing `toString()` with a datetime argument, it is possible to improve performance a bit for non-datetime arguments and have some parts of the code cleaner. Follows up [#53680](https://github.com/ClickHouse/ClickHouse/issues/53680). [#54443](https://github.com/ClickHouse/ClickHouse/pull/54443) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Instead of serializing JSON elements into a `std::stringstream`, put the serialization result into `ColumnString` directly. [#54613](https://github.com/ClickHouse/ClickHouse/pull/54613) ([lgbo](https://github.com/lgbo-ustc)).
* Enable ORDER BY optimization for reading data in corresponding order from a MergeTree table in case the table is behind a view. [#54628](https://github.com/ClickHouse/ClickHouse/pull/54628) ([Vitaly Baranov](https://github.com/vitlibar)).
* Improve JSON SQL functions by reusing `GeneratorJSONPath`. Since there are several `make_shared` calls in `GeneratorJSONPath`'s constructor, it had bad performance. [#54735](https://github.com/ClickHouse/ClickHouse/pull/54735) ([lgbo](https://github.com/lgbo-ustc)).

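A rough illustration of the `optimize_uniq_to_count` rewrite mentioned above, assuming a hypothetical `visits` table; the exact set of rewritten query patterns is described in the linked PR:

```sql
-- With the setting enabled, a uniq/uniqExact over a DISTINCT (or GROUP BY) subquery
-- can be executed as a plain count() over that subquery, which is cheaper.
SELECT uniqExact(user_id)
FROM (SELECT DISTINCT user_id FROM visits)
SETTINGS optimize_uniq_to_count = 1;
```
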
#### Improvement
* Keeper improvement: Add a `createIfNotExists` Keeper command. [#48855](https://github.com/ClickHouse/ClickHouse/pull/48855) ([Konstantin Bogdanov](https://github.com/thevar1able)).
* Add IF EMPTY clause for DROP TABLE queries. [#48915](https://github.com/ClickHouse/ClickHouse/pull/48915) ([Pavel Novitskiy](https://github.com/pnovitskiy)).
* The Keeper dynamically adjusts log levels. [#50372](https://github.com/ClickHouse/ClickHouse/pull/50372) ([helifu](https://github.com/helifu)).
* Allow replacing long names of column files in `MergeTree` data parts with hashes of names. It helps to avoid the `File name too long` error in some cases. [#50612](https://github.com/ClickHouse/ClickHouse/pull/50612) ([Anton Popov](https://github.com/CurtizJ)).
* Allow specifying the expiration date and, optionally, the time for user credentials with the `VALID UNTIL datetime` clause. [#51261](https://github.com/ClickHouse/ClickHouse/pull/51261) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add setting `ignore_access_denied_multidirectory_globs`. [#52839](https://github.com/ClickHouse/ClickHouse/pull/52839) ([Andrey Zvonov](https://github.com/zvonand)).
* Output valid JSON/XML on exception during HTTP query execution. Add setting `http_write_exception_in_output_format` to enable/disable this behaviour (enabled by default). [#52853](https://github.com/ClickHouse/ClickHouse/pull/52853) ([Kruglov Pavel](https://github.com/Avogar)).
* More precise Integer type inference, fix [#51236](https://github.com/ClickHouse/ClickHouse/issues/51236). [#53003](https://github.com/ClickHouse/ClickHouse/pull/53003) ([Chen768959](https://github.com/Chen768959)).
* Keeper tries to batch flush requests for better performance. [#53049](https://github.com/ClickHouse/ClickHouse/pull/53049) ([Antonio Andelic](https://github.com/antonio2368)).
* Introduced resolving of charsets in the string literals for MaterializedMySQL. [#53220](https://github.com/ClickHouse/ClickHouse/pull/53220) ([Val Doroshchuk](https://github.com/valbok)).
* Fix a subtle issue with a rarely used `EmbeddedRocksDB` table engine in an extremely rare scenario: sometimes the `EmbeddedRocksDB` table engine does not close files correctly in NFS after running `DROP TABLE`. [#53502](https://github.com/ClickHouse/ClickHouse/pull/53502) ([Mingliang Pan](https://github.com/liangliangpan)).
* SQL functions "toString(datetime)" and "formatDateTime()" now support non-constant timezone arguments. [#53680](https://github.com/ClickHouse/ClickHouse/pull/53680) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* `RESTORE TABLE ON CLUSTER` must create replicated tables with a matching UUID on hosts. Otherwise the macro `{uuid}` in ZooKeeper path can't work correctly after RESTORE. This PR implements that. [#53765](https://github.com/ClickHouse/ClickHouse/pull/53765) ([Vitaly Baranov](https://github.com/vitlibar)).
* Added restore setting `restore_broken_parts_as_detached`: if it's true the RESTORE process won't stop on broken parts while restoring, instead all the broken parts will be copied to the `detached` folder with the prefix `broken-from-backup`. If it's false the RESTORE process will stop on the first broken part (if any). The default value is false. [#53877](https://github.com/ClickHouse/ClickHouse/pull/53877) ([Vitaly Baranov](https://github.com/vitlibar)).
* The creation of Annoy indexes can now be parallelized using setting `max_threads_for_annoy_index_creation`. [#54047](https://github.com/ClickHouse/ClickHouse/pull/54047) ([Robert Schulze](https://github.com/rschu1ze)).
* The MySQL interface gained a minimal implementation of prepared statements, just enough to allow a connection from Tableau Online to ClickHouse via the MySQL connector. [#54115](https://github.com/ClickHouse/ClickHouse/pull/54115) ([Serge Klochkov](https://github.com/slvrtrn)).
* Replaced the library used to handle (encode/decode) base64 values from Turbo-Base64 to aklomp-base64. Both are SIMD-accelerated on x86 and ARM, but 1. the license of the latter (BSD-2) is more favorable for ClickHouse, Turbo-Base64 switched in the meantime to GPL-3, 2. with more GitHub stars, aklomp-base64 seems more future-proof, 3. aklomp-base64 has a slightly nicer API (which is arguably subjective), and 4. aklomp-base64 does not require us to hack around bugs (like non-threadsafe initialization). Note: aklomp-base64 rejects unpadded base64 values whereas Turbo-Base64 decodes them on a best-effort basis. RFC-4648 leaves it open whether padding is mandatory or not, but depending on the context this may be a behavioral change to be aware of. [#54119](https://github.com/ClickHouse/ClickHouse/pull/54119) ([Mikhail Koviazin](https://github.com/mkmkme)).
* Add elapsed_ns to HTTP headers X-ClickHouse-Progress and X-ClickHouse-Summary. [#54179](https://github.com/ClickHouse/ClickHouse/pull/54179) ([joelynch](https://github.com/joelynch)).
* Implementation of `reconfig` (https://github.com/ClickHouse/ClickHouse/pull/49450), `sync`, and `exists` commands for keeper-client. [#54201](https://github.com/ClickHouse/ClickHouse/pull/54201) ([pufit](https://github.com/pufit)).
* "clickhouse-local" and "clickhouse-client" now allow specifying the "--query" parameter multiple times, e.g. `./clickhouse-client --query "SELECT 1" --query "SELECT 2"`. This syntax is slightly more intuitive than `./clickhouse-client --multiquery "SELECT 1; SELECT 2"`, a bit easier to script (e.g. `queries.push_back('--query "$q"')`) and more consistent with the behavior of the existing parameter "--queries-file" (e.g. `./clickhouse client --queries-file queries1.sql --queries-file queries2.sql`). [#54249](https://github.com/ClickHouse/ClickHouse/pull/54249) ([Robert Schulze](https://github.com/rschu1ze)).
* Add sub-second precision to `formatReadableTimeDelta`. [#54250](https://github.com/ClickHouse/ClickHouse/pull/54250) ([Andrey Zvonov](https://github.com/zvonand)).
* Fix wrong reallocation in HashedArrayDictionary. [#54254](https://github.com/ClickHouse/ClickHouse/pull/54254) ([Vitaly Baranov](https://github.com/vitlibar)).
* Enable allow_remove_stale_moving_parts by default. [#54260](https://github.com/ClickHouse/ClickHouse/pull/54260) ([vdimir](https://github.com/vdimir)).
* Fix using count from cache and improve progress bar for reading from archives. [#54271](https://github.com/ClickHouse/ClickHouse/pull/54271) ([Kruglov Pavel](https://github.com/Avogar)).
* Add support for S3 credentials using SSO. To define a profile to be used with SSO, set the `AWS_PROFILE` environment variable. [#54347](https://github.com/ClickHouse/ClickHouse/pull/54347) ([Antonio Andelic](https://github.com/antonio2368)).
* Support NULL as default for nested types Array/Tuple/Map for input formats. Closes [#51100](https://github.com/ClickHouse/ClickHouse/issues/51100). [#54351](https://github.com/ClickHouse/ClickHouse/pull/54351) ([Kruglov Pavel](https://github.com/Avogar)).
* This is actually a bug fix, but not sure I'll be able to add a test to support the case, so I have put it as an improvement. This issue was introduced in https://github.com/ClickHouse/ClickHouse/pull/45878, which is when CH started reading arrow in batches. [#54370](https://github.com/ClickHouse/ClickHouse/pull/54370) ([Arthur Passos](https://github.com/arthurpassos)).
* Add STD alias to the stddevPop function for MySQL compatibility. Closes [#54274](https://github.com/ClickHouse/ClickHouse/issues/54274). [#54382](https://github.com/ClickHouse/ClickHouse/pull/54382) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add `addDate` function for compatibility with MySQL and `subDate` for consistency. Reference [#54275](https://github.com/ClickHouse/ClickHouse/issues/54275). [#54400](https://github.com/ClickHouse/ClickHouse/pull/54400) ([Nikolay Degterinsky](https://github.com/evillique)).
* Parse data in JSON format as JSONEachRow if it failed to parse metadata. This allows reading files with the `.json` extension even if the real format is JSONEachRow. Closes [#45740](https://github.com/ClickHouse/ClickHouse/issues/45740). [#54405](https://github.com/ClickHouse/ClickHouse/pull/54405) ([Kruglov Pavel](https://github.com/Avogar)).
* Pass http retry timeout as milliseconds. [#54438](https://github.com/ClickHouse/ClickHouse/pull/54438) ([Duc Canh Le](https://github.com/canhld94)).
* Support SAMPLE BY for VIEW. [#54477](https://github.com/ClickHouse/ClickHouse/pull/54477) ([Azat Khuzhin](https://github.com/azat)).
* Add modification_time into system.detached_parts. [#54506](https://github.com/ClickHouse/ClickHouse/pull/54506) ([Azat Khuzhin](https://github.com/azat)).
* Added a setting "splitby_max_substrings_includes_remaining_string" which controls if functions "splitBy*()" with argument "max_substring" > 0 include the remaining string (if any) in the result array (Python/Spark semantics) or not. The default behavior does not change. [#54518](https://github.com/ClickHouse/ClickHouse/pull/54518) ([Robert Schulze](https://github.com/rschu1ze)).
* clickhouse-client now processes files in parallel in case of `INFILE 'glob_expression'`. Closes [#54218](https://github.com/ClickHouse/ClickHouse/issues/54218). [#54533](https://github.com/ClickHouse/ClickHouse/pull/54533) ([Max K.](https://github.com/mkaynov)).
* Allow using the primary key for the IN function where primary key column types are different from the `IN` function's right-side column types. Example: `SELECT id FROM test_table WHERE id IN (SELECT '5')`. Closes [#48936](https://github.com/ClickHouse/ClickHouse/issues/48936). [#54544](https://github.com/ClickHouse/ClickHouse/pull/54544) ([Maksim Kita](https://github.com/kitaisreal)).
* Better integer type inference for Int64/UInt64 fields. Continuation of https://github.com/ClickHouse/ClickHouse/pull/53003. Now it also works for nested types like Arrays of Arrays and for functions like `map/tuple`. Issue: [#51236](https://github.com/ClickHouse/ClickHouse/issues/51236). [#54553](https://github.com/ClickHouse/ClickHouse/pull/54553) ([Kruglov Pavel](https://github.com/Avogar)).
* HashJoin tries to shrink internal buffers consuming half of maximal available memory (set by `max_bytes_in_join`). [#54584](https://github.com/ClickHouse/ClickHouse/pull/54584) ([vdimir](https://github.com/vdimir)).
* Added array operations for multiplying, dividing and modulo on scalar. Works both ways, for example `5 * [5, 5]` and `[5, 5] * 5` - both cases are possible (see the sketch after this list). [#54608](https://github.com/ClickHouse/ClickHouse/pull/54608) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Added function `timestamp` for compatibility with MySQL. Closes [#54275](https://github.com/ClickHouse/ClickHouse/issues/54275). [#54639](https://github.com/ClickHouse/ClickHouse/pull/54639) ([Nikolay Degterinsky](https://github.com/evillique)).
* Respect max_block_size for array join to avoid possible OOM. Closes [#54290](https://github.com/ClickHouse/ClickHouse/issues/54290). [#54664](https://github.com/ClickHouse/ClickHouse/pull/54664) ([李扬](https://github.com/taiyang-li)).
* Add optional `version` argument to the `rm` command in `keeper-client` to support safer deletes. [#54708](https://github.com/ClickHouse/ClickHouse/pull/54708) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Disable killing the server by systemd (that may lead to data loss when using Buffer tables). [#54744](https://github.com/ClickHouse/ClickHouse/pull/54744) ([Azat Khuzhin](https://github.com/azat)).
* Added field "is_deterministic" to system table "system.functions" which indicates whether the result of a function is stable between two invocations (given exactly the same inputs) or not. [#54766](https://github.com/ClickHouse/ClickHouse/pull/54766) ([Robert Schulze](https://github.com/rschu1ze)).
* Made the views in schema "information_schema" more compatible with the equivalent views in MySQL (i.e. modified and extended them) up to a point where Tableau Online is able to connect to ClickHouse. More specifically: 1. The type of the field "information_schema.tables.table_type" changed from Enum8 to String. 2. Added fields "table_comment" and "table_collation" to view "information_schema.tables". 3. Added views "information_schema.key_column_usage" and "referential_constraints". 4. Replaced uppercase aliases in "information_schema" views with concrete uppercase columns. [#54773](https://github.com/ClickHouse/ClickHouse/pull/54773) ([Serge Klochkov](https://github.com/slvrtrn)).
* The query cache now returns an error if the user tries to cache the result of a query with a non-deterministic function such as "now()", "randomString()" and "dictGet()". Compared to the previous behavior (silently not caching the result), this reduces confusion and surprise for users. [#54801](https://github.com/ClickHouse/ClickHouse/pull/54801) ([Robert Schulze](https://github.com/rschu1ze)).
* Forbid special columns for file/s3/url/... storages, fix insert into ephemeral columns from files. Closes [#53477](https://github.com/ClickHouse/ClickHouse/issues/53477). [#54803](https://github.com/ClickHouse/ClickHouse/pull/54803) ([Kruglov Pavel](https://github.com/Avogar)).
* Made collecting metadata for backups more configurable. [#54804](https://github.com/ClickHouse/ClickHouse/pull/54804) ([Vitaly Baranov](https://github.com/vitlibar)).
* `clickhouse-local`'s log file (if enabled with the --server_logs_file flag) will now prefix each line with timestamp, thread id, etc, just like `clickhouse-server`. [#54807](https://github.com/ClickHouse/ClickHouse/pull/54807) ([Michael Kolupaev](https://github.com/al13n321)).
* Reuse HTTP connections in the s3 table function. [#54812](https://github.com/ClickHouse/ClickHouse/pull/54812) ([Michael Kolupaev](https://github.com/al13n321)).
* Avoid excessive calls to getifaddrs in isLocalAddress. [#54819](https://github.com/ClickHouse/ClickHouse/pull/54819) ([Duc Canh Le](https://github.com/canhld94)).
* Field "is_obsolete" in system.merge_tree_settings is now 1 for obsolete merge tree settings. Previously, only the description indicated that the setting is obsolete. [#54837](https://github.com/ClickHouse/ClickHouse/pull/54837) ([Robert Schulze](https://github.com/rschu1ze)).
* Make it possible to use plural units in interval literals. `INTERVAL 2 HOURS` should be equivalent to `INTERVAL 2 HOUR`. [#54860](https://github.com/ClickHouse/ClickHouse/pull/54860) ([Jordi Villar](https://github.com/jrdi)).
* Replace the linear method in `MergeTreeRangeReader::Stream::ceilRowsToCompleteGranules` with a binary search. [#54869](https://github.com/ClickHouse/ClickHouse/pull/54869) ([usurai](https://github.com/usurai)).
* Always allow the creation of a projection with `Nullable` PK. This fixes [#54814](https://github.com/ClickHouse/ClickHouse/issues/54814). [#54895](https://github.com/ClickHouse/ClickHouse/pull/54895) ([Amos Bird](https://github.com/amosbird)).
* Retry backup S3 operations after connection reset failure. [#54900](https://github.com/ClickHouse/ClickHouse/pull/54900) ([Vitaly Baranov](https://github.com/vitlibar)).
* Make the exception message exact in case the maximum value of a setting is less than the minimum value. [#54925](https://github.com/ClickHouse/ClickHouse/pull/54925) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* LIKE, match, and other regular expression matching functions now allow matching with patterns containing non-UTF-8 substrings by falling back to binary matching. Example: you can use `string LIKE '\xFE\xFF%'` to detect BOM. This closes [#54486](https://github.com/ClickHouse/ClickHouse/issues/54486). [#54942](https://github.com/ClickHouse/ClickHouse/pull/54942) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* ProfileEvents: added the ContextLockWaitMicroseconds event. [#55029](https://github.com/ClickHouse/ClickHouse/pull/55029) ([Maksim Kita](https://github.com/kitaisreal)).
* Added field "is_deterministic" to system table "system.functions" which indicates whether the result of a function is stable between two invocations (given exactly the same inputs) or not. [#55035](https://github.com/ClickHouse/ClickHouse/pull/55035) ([Robert Schulze](https://github.com/rschu1ze)).
* View information_schema.tables now has a new field `data_length` which shows the approximate size of the data on disk. Required to run queries generated by Amazon QuickSight. [#55037](https://github.com/ClickHouse/ClickHouse/pull/55037) ([Robert Schulze](https://github.com/rschu1ze)).

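Several of the SQL-level improvements above already show their syntax inline; the sketch below only collects them in runnable form. The constants are illustrative, and the argument shape of `addDate`/`subDate` is assumed to mirror MySQL:

```sql
SELECT [5, 5] * 5, 5 * [5, 5];                          -- array-scalar multiply/divide/modulo, either operand order
SELECT now() + INTERVAL 2 HOURS;                        -- plural interval units are accepted
SELECT addDate(toDate('2023-09-01'), INTERVAL 5 DAY),
       subDate(toDate('2023-09-01'), INTERVAL 5 DAY);   -- MySQL-compatible date helpers
SELECT STD(number) FROM numbers(10);                    -- STD is an alias of stddevPop
SELECT 'abc' LIKE '\xFE\xFF%';                          -- non-UTF-8 pattern falls back to binary matching
```
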
#### Build/Testing/Packaging Improvement
* ClickHouse is built with Musl instead of GLibc by default. [#52550](https://github.com/ClickHouse/ClickHouse/pull/52550) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* ClickHouse is built with Musl instead of GLibc. [#52721](https://github.com/ClickHouse/ClickHouse/pull/52721) ([Azat Khuzhin](https://github.com/azat)).
* Bumped the compiler of official and continuous integration builds of ClickHouse from Clang 16 to 17. [#53831](https://github.com/ClickHouse/ClickHouse/pull/53831) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix flaky test. The `wait_resolver` function was asserting the response to be == proxy1, but it might actually return proxy2. Account for that as well. [#54191](https://github.com/ClickHouse/ClickHouse/pull/54191) ([Arthur Passos](https://github.com/arthurpassos)).
* Regenerated tld data for lookups (`tldLookup.generated.cpp`). [#54269](https://github.com/ClickHouse/ClickHouse/pull/54269) ([Bharat Nallan](https://github.com/bharatnc)).
* Properly report the timeout for the check itself in `fast_test_check`/`stress_check`. [#54278](https://github.com/ClickHouse/ClickHouse/pull/54278) ([Igor Nikonov](https://github.com/devcrafter)).
* Suddenly, `test_host_regexp_multiple_ptr_records_concurrent` became flaky. [#54307](https://github.com/ClickHouse/ClickHouse/pull/54307) ([Arthur Passos](https://github.com/arthurpassos)).
* Fixed precise float parsing issue on s390x. [#54330](https://github.com/ClickHouse/ClickHouse/pull/54330) ([Harry Lee](https://github.com/HarryLeeIBM)).
* Enrich `changed_images.json` with the latest tag from master for images that are not changed in the pull request. [#54369](https://github.com/ClickHouse/ClickHouse/pull/54369) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Fixed endian issue in the jemalloc_bins system table for s390x. [#54517](https://github.com/ClickHouse/ClickHouse/pull/54517) ([Harry Lee](https://github.com/HarryLeeIBM)).
* Fixed random generation issue for UInt256 and IPv4 on s390x. [#54576](https://github.com/ClickHouse/ClickHouse/pull/54576) ([Harry Lee](https://github.com/HarryLeeIBM)).
* Remove redundant `clickhouse-keeper-client` symlink. [#54587](https://github.com/ClickHouse/ClickHouse/pull/54587) ([Tomas Barton](https://github.com/deric)).
* Use `/usr/bin/env` to resolve bash. [#54603](https://github.com/ClickHouse/ClickHouse/pull/54603) ([Fionera](https://github.com/fionera)).
* Move all `tests/ci/*.lib` files to the `stateless-tests` image. Closes [#54540](https://github.com/ClickHouse/ClickHouse/issues/54540). [#54668](https://github.com/ClickHouse/ClickHouse/pull/54668) ([Kruglov Pavel](https://github.com/Avogar)).
* We build and upload them for every push, which isn't worth it. [#54675](https://github.com/ClickHouse/ClickHouse/pull/54675) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Fixed SimHash function endian issue for s390x. [#54793](https://github.com/ClickHouse/ClickHouse/pull/54793) ([Harry Lee](https://github.com/HarryLeeIBM)).
* Do not clone the fast tests repo twice; parallelize submodules checkout; use the current user in the fast-tests container. [#54849](https://github.com/ClickHouse/ClickHouse/pull/54849) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Avoid running the pull request CI workflow for fixes touching .md files only. [#54914](https://github.com/ClickHouse/ClickHouse/pull/54914) ([Max K.](https://github.com/mkaynov)).
* CMake added a `PROFILE_CPU` option needed to perform `perf record` without using the DWARF call graph. [#54917](https://github.com/ClickHouse/ClickHouse/pull/54917) ([Maksim Kita](https://github.com/kitaisreal)).
* Use `--gtest_output='json:'` to parse unit test results. [#54922](https://github.com/ClickHouse/ClickHouse/pull/54922) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Added support for additional scripts (you need to mount a volume) to extend the build process. [#55000](https://github.com/ClickHouse/ClickHouse/pull/55000) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* If the linker is different than LLD, stop with a fatal error. [#55036](https://github.com/ClickHouse/ClickHouse/pull/55036) ([Alexey Milovidov](https://github.com/alexey-milovidov)).

#### Bug Fix (user-visible misbehavior in an official stable release)

* Store NULL in scalar result map for empty subquery result [#52240](https://github.com/ClickHouse/ClickHouse/pull/52240) ([vdimir](https://github.com/vdimir)).
* Fix misleading error message in OUTFILE with CapnProto/Protobuf [#52870](https://github.com/ClickHouse/ClickHouse/pull/52870) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix summary reporting with parallel replicas with LIMIT [#53050](https://github.com/ClickHouse/ClickHouse/pull/53050) ([Raúl Marín](https://github.com/Algunenano)).
* Fix throttling of BACKUPs from/to S3 (in case native copy was not used) and in some other places as well [#53336](https://github.com/ClickHouse/ClickHouse/pull/53336) ([Azat Khuzhin](https://github.com/azat)).
* Fix IO throttling during copying whole directories [#53338](https://github.com/ClickHouse/ClickHouse/pull/53338) ([Azat Khuzhin](https://github.com/azat)).
* Fix: moved to prewhere condition actions can lose column [#53492](https://github.com/ClickHouse/ClickHouse/pull/53492) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* 37737 fixed internal error when replacing with byte-equal parts [#53735](https://github.com/ClickHouse/ClickHouse/pull/53735) ([Pedro Riera](https://github.com/priera)).
* Fix: require columns participating in interpolate expression [#53754](https://github.com/ClickHouse/ClickHouse/pull/53754) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix cluster discovery initialization + setting up fail points in config [#54113](https://github.com/ClickHouse/ClickHouse/pull/54113) ([vdimir](https://github.com/vdimir)).
* Fix issues in accurateCastOrNull [#54136](https://github.com/ClickHouse/ClickHouse/pull/54136) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Fix nullable primary key in final [#54164](https://github.com/ClickHouse/ClickHouse/pull/54164) ([Amos Bird](https://github.com/amosbird)).
* Inserting only non-duplicate chunks in MV [#54184](https://github.com/ClickHouse/ClickHouse/pull/54184) ([Pedro Riera](https://github.com/priera)).
* Fix REPLACE/MOVE PARTITION with zero-copy replication [#54193](https://github.com/ClickHouse/ClickHouse/pull/54193) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix: parallel replicas over distributed don't read from all replicas [#54199](https://github.com/ClickHouse/ClickHouse/pull/54199) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix: allow IPv6 for bloom filter [#54200](https://github.com/ClickHouse/ClickHouse/pull/54200) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* fix possible type mismatch with IPv4 [#54212](https://github.com/ClickHouse/ClickHouse/pull/54212) ([Bharat Nallan](https://github.com/bharatnc)).
* Fix system.data_skipping_indices for recreated indices [#54225](https://github.com/ClickHouse/ClickHouse/pull/54225) ([Artur Malchanau](https://github.com/Hexta)).
* fix name clash for multiple join rewriter v2 [#54240](https://github.com/ClickHouse/ClickHouse/pull/54240) ([Tao Wang](https://github.com/wangtZJU)).
* Fix unexpected errors in system.errors after join [#54306](https://github.com/ClickHouse/ClickHouse/pull/54306) ([vdimir](https://github.com/vdimir)).
* Fix isZeroOrNull(NULL) [#54316](https://github.com/ClickHouse/ClickHouse/pull/54316) ([flynn](https://github.com/ucasfl)).
* Fix: parallel replicas over distributed with prefer_localhost_replica=1 [#54334](https://github.com/ClickHouse/ClickHouse/pull/54334) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix logical error in vertical merge + replacing merge tree + optimize cleanup [#54368](https://github.com/ClickHouse/ClickHouse/pull/54368) ([alesapin](https://github.com/alesapin)).
* Fix possible error 'URI contains invalid characters' in s3 table function [#54373](https://github.com/ClickHouse/ClickHouse/pull/54373) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix segfault in AST optimization of `arrayExists` function [#54379](https://github.com/ClickHouse/ClickHouse/pull/54379) ([Nikolay Degterinsky](https://github.com/evillique)).
* Check for overflow before addition in `analysisOfVariance` function [#54385](https://github.com/ClickHouse/ClickHouse/pull/54385) ([Antonio Andelic](https://github.com/antonio2368)).
* reproduce and fix the bug in removeSharedRecursive [#54430](https://github.com/ClickHouse/ClickHouse/pull/54430) ([Sema Checherinda](https://github.com/CheSema)).
* Fix possible incorrect result with SimpleAggregateFunction in PREWHERE and FINAL [#54436](https://github.com/ClickHouse/ClickHouse/pull/54436) ([Azat Khuzhin](https://github.com/azat)).
* Fix filtering parts with indexHint for non analyzer [#54449](https://github.com/ClickHouse/ClickHouse/pull/54449) ([Azat Khuzhin](https://github.com/azat)).
* Fix aggregate projections with normalized states [#54480](https://github.com/ClickHouse/ClickHouse/pull/54480) ([Amos Bird](https://github.com/amosbird)).
* Bugfix/local multiquery parameter [#54498](https://github.com/ClickHouse/ClickHouse/pull/54498) ([CuiShuoGuo](https://github.com/bakam412)).
* clickhouse-local support --database command line argument [#54503](https://github.com/ClickHouse/ClickHouse/pull/54503) ([vdimir](https://github.com/vdimir)).
* Fix possible parsing error in WithNames formats with disabled input_format_with_names_use_header [#54513](https://github.com/ClickHouse/ClickHouse/pull/54513) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix rare case of CHECKSUM_DOESNT_MATCH error [#54549](https://github.com/ClickHouse/ClickHouse/pull/54549) ([alesapin](https://github.com/alesapin)).
* Fix zero copy garbage [#54550](https://github.com/ClickHouse/ClickHouse/pull/54550) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix sorting of UNION ALL of already sorted results [#54564](https://github.com/ClickHouse/ClickHouse/pull/54564) ([Vitaly Baranov](https://github.com/vitlibar)).
* Fix snapshot install in Keeper [#54572](https://github.com/ClickHouse/ClickHouse/pull/54572) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix race in `ColumnUnique` [#54575](https://github.com/ClickHouse/ClickHouse/pull/54575) ([Nikita Taranov](https://github.com/nickitat)).
* Annoy/Usearch index: Fix LOGICAL_ERROR during build-up with default values [#54600](https://github.com/ClickHouse/ClickHouse/pull/54600) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix serialization of `ColumnDecimal` [#54601](https://github.com/ClickHouse/ClickHouse/pull/54601) ([Nikita Taranov](https://github.com/nickitat)).
* Fix schema inference for *Cluster functions for column names with spaces [#54635](https://github.com/ClickHouse/ClickHouse/pull/54635) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix using structure from insertion tables in case of defaults and explicit insert columns [#54655](https://github.com/ClickHouse/ClickHouse/pull/54655) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix: avoid using regex match, possibly containing alternation, as a key condition. [#54696](https://github.com/ClickHouse/ClickHouse/pull/54696) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix ReplacingMergeTree with vertical merge and cleanup [#54706](https://github.com/ClickHouse/ClickHouse/pull/54706) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Fix virtual columns having incorrect values after ORDER BY [#54811](https://github.com/ClickHouse/ClickHouse/pull/54811) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix filtering parts with indexHint for non analyzer (resubmit) [#54825](https://github.com/ClickHouse/ClickHouse/pull/54825) ([Azat Khuzhin](https://github.com/azat)).
* Fix Keeper segfault during shutdown [#54841](https://github.com/ClickHouse/ClickHouse/pull/54841) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix "Invalid number of rows in Chunk" in MaterializedPostgreSQL [#54844](https://github.com/ClickHouse/ClickHouse/pull/54844) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Move obsolete format settings to separate section [#54855](https://github.com/ClickHouse/ClickHouse/pull/54855) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix zero copy locks with hardlinks [#54859](https://github.com/ClickHouse/ClickHouse/pull/54859) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix `FINAL` produces invalid read ranges in a rare case [#54934](https://github.com/ClickHouse/ClickHouse/pull/54934) ([Nikita Taranov](https://github.com/nickitat)).
* Rebuild minmax_count_projection when partition key gets modified [#54943](https://github.com/ClickHouse/ClickHouse/pull/54943) ([Amos Bird](https://github.com/amosbird)).
* Fix bad cast to ColumnVector<Int128> in function if [#55019](https://github.com/ClickHouse/ClickHouse/pull/55019) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix: insert quorum w/o keeper retries [#55026](https://github.com/ClickHouse/ClickHouse/pull/55026) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix simple state with nullable [#55030](https://github.com/ClickHouse/ClickHouse/pull/55030) ([Pedro Riera](https://github.com/priera)).
* Prevent attaching parts from tables with different projections or indices [#55062](https://github.com/ClickHouse/ClickHouse/pull/55062) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).

#### NO CL ENTRY

* NO CL ENTRY: 'Revert "Revert "Fixed wrong python test name pattern""'. [#54043](https://github.com/ClickHouse/ClickHouse/pull/54043) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* NO CL ENTRY: 'Revert "Fix: respect skip_unavailable_shards with parallel replicas"'. [#54189](https://github.com/ClickHouse/ClickHouse/pull/54189) ([Alexander Tokmakov](https://github.com/tavplubix)).
* NO CL ENTRY: 'Revert "Add settings for real-time updates during query execution"'. [#54470](https://github.com/ClickHouse/ClickHouse/pull/54470) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* NO CL ENTRY: 'Revert "Fix issues in accurateCastOrNull"'. [#54472](https://github.com/ClickHouse/ClickHouse/pull/54472) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* NO CL ENTRY: 'Revert "Revert "Add settings for real-time updates during query execution""'. [#54476](https://github.com/ClickHouse/ClickHouse/pull/54476) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* NO CL ENTRY: 'Revert "add runOptimize call in bitmap write method"'. [#54528](https://github.com/ClickHouse/ClickHouse/pull/54528) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* NO CL ENTRY: 'Revert "Optimize uniq to count"'. [#54566](https://github.com/ClickHouse/ClickHouse/pull/54566) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* NO CL ENTRY: 'Revert "Add stateless test for clickhouse keeper-client --no-confirmation"'. [#54616](https://github.com/ClickHouse/ClickHouse/pull/54616) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* NO CL ENTRY: 'Revert "Remove flaky tests for the experimental `UNDROP` feature"'. [#54671](https://github.com/ClickHouse/ClickHouse/pull/54671) ([Alexander Tokmakov](https://github.com/tavplubix)).
* NO CL ENTRY: 'Revert "Fix filtering parts with indexHint for non analyzer"'. [#54806](https://github.com/ClickHouse/ClickHouse/pull/54806) ([Azat Khuzhin](https://github.com/azat)).
* NO CL ENTRY: 'Revert "refine error code of duplicated index in create query"'. [#54840](https://github.com/ClickHouse/ClickHouse/pull/54840) ([Alexander Tokmakov](https://github.com/tavplubix)).
* NO CL ENTRY: 'Revert "Avoid excessive calls to getifaddrs in isLocalAddress"'. [#54893](https://github.com/ClickHouse/ClickHouse/pull/54893) ([Igor Nikonov](https://github.com/devcrafter)).
* NO CL ENTRY: 'Revert "Fix NATS high cpu usage"'. [#55005](https://github.com/ClickHouse/ClickHouse/pull/55005) ([Nikolay Degterinsky](https://github.com/evillique)).

#### NOT FOR CHANGELOG / INSIGNIFICANT

* libFuzzer: add CI fuzzers build, add tcp protocol fuzzer, fix other fuzzers. [#42599](https://github.com/ClickHouse/ClickHouse/pull/42599) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Add new exceptions to 4xx error [#50722](https://github.com/ClickHouse/ClickHouse/pull/50722) ([Boris Kuschel](https://github.com/bkuschel)).
* Test libunwind changes. [#51436](https://github.com/ClickHouse/ClickHouse/pull/51436) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix data race in copyFromIStreamWithProgressCallback [#51449](https://github.com/ClickHouse/ClickHouse/pull/51449) ([Michael Kolupaev](https://github.com/al13n321)).
* Abort on `std::logic_error` in CI [#51907](https://github.com/ClickHouse/ClickHouse/pull/51907) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Unify setting http keep-alive timeout, increase default to 30s [#53068](https://github.com/ClickHouse/ClickHouse/pull/53068) ([Nikita Taranov](https://github.com/nickitat)).
* Add a regression test for broken Vertical merge after ADD+DROP COLUMN [#53214](https://github.com/ClickHouse/ClickHouse/pull/53214) ([Azat Khuzhin](https://github.com/azat)).
* Revert "Revert "dateDiff: add support for plural units."" [#53803](https://github.com/ClickHouse/ClickHouse/pull/53803) ([Han Fei](https://github.com/hanfei1991)).
* Fix some tests [#53892](https://github.com/ClickHouse/ClickHouse/pull/53892) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Refactoring of reading from `MergeTree` tables [#53931](https://github.com/ClickHouse/ClickHouse/pull/53931) ([Anton Popov](https://github.com/CurtizJ)).
* Use pathlib.Path in S3Helper, rewrite build reports, improve small things [#54010](https://github.com/ClickHouse/ClickHouse/pull/54010) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Correct UniquesHashSet to be endianness-independent. [#54045](https://github.com/ClickHouse/ClickHouse/pull/54045) ([Austin Kothig](https://github.com/kothiga)).
* Increase retries for test_merge_tree_azure_blob_storage [#54069](https://github.com/ClickHouse/ClickHouse/pull/54069) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Fix SipHash128 reference for big-endian platforms [#54095](https://github.com/ClickHouse/ClickHouse/pull/54095) ([ltrk2](https://github.com/ltrk2)).
* Small usearch index improvements: metrics and configurable internal data type [#54103](https://github.com/ClickHouse/ClickHouse/pull/54103) ([Michael Kolupaev](https://github.com/al13n321)).
* Small refactoring for read from object storage [#54134](https://github.com/ClickHouse/ClickHouse/pull/54134) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Minor changes [#54171](https://github.com/ClickHouse/ClickHouse/pull/54171) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix hostname and co result constness in new analyzer [#54174](https://github.com/ClickHouse/ClickHouse/pull/54174) ([vdimir](https://github.com/vdimir)).
* Amend a confusing line of code in Loggers.cpp [#54183](https://github.com/ClickHouse/ClickHouse/pull/54183) ([Victor Krasnov](https://github.com/sirvickr)).
* Fix partition id pruning for analyzer. [#54185](https://github.com/ClickHouse/ClickHouse/pull/54185) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Update version after release [#54186](https://github.com/ClickHouse/ClickHouse/pull/54186) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Update version_date.tsv and changelogs after v23.8.1.2992-lts [#54188](https://github.com/ClickHouse/ClickHouse/pull/54188) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Fix pager in client/local interactive mode when not all data had been read [#54190](https://github.com/ClickHouse/ClickHouse/pull/54190) ([Azat Khuzhin](https://github.com/azat)).
* Fix flaky test `01099_operators_date_and_timestamp` [#54195](https://github.com/ClickHouse/ClickHouse/pull/54195) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Save system tables from s3_disk in the report [#54198](https://github.com/ClickHouse/ClickHouse/pull/54198) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix timezones in the CI Logs database [#54210](https://github.com/ClickHouse/ClickHouse/pull/54210) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* 2R: Fix: respect skip_unavailable_shards with parallel replicas [#54213](https://github.com/ClickHouse/ClickHouse/pull/54213) ([Igor Nikonov](https://github.com/devcrafter)).
* S3Queue is experimental [#54214](https://github.com/ClickHouse/ClickHouse/pull/54214) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Improve vars with reference in Settings cpps [#54220](https://github.com/ClickHouse/ClickHouse/pull/54220) ([xuzifu666](https://github.com/xuzifu666)).
* Add ProfileEvents::Timer class [#54221](https://github.com/ClickHouse/ClickHouse/pull/54221) ([Stig Bakken](https://github.com/stigsb)).
* Test: extend cluster_all_replicas integration test with skip_unavailable_shards [#54223](https://github.com/ClickHouse/ClickHouse/pull/54223) ([Igor Nikonov](https://github.com/devcrafter)).
* remove semicolon [#54236](https://github.com/ClickHouse/ClickHouse/pull/54236) ([YinZheng-Sun](https://github.com/YinZheng-Sun)).
* Fix bad code in the `system.filesystem_cache`: catching exceptions [#54237](https://github.com/ClickHouse/ClickHouse/pull/54237) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Merge [#54236](https://github.com/ClickHouse/ClickHouse/issues/54236) [#54238](https://github.com/ClickHouse/ClickHouse/pull/54238) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Minor improvement, load from config [#54244](https://github.com/ClickHouse/ClickHouse/pull/54244) ([zhanglistar](https://github.com/zhanglistar)).
* Follow-up to [#54198](https://github.com/ClickHouse/ClickHouse/issues/54198) [#54246](https://github.com/ClickHouse/ClickHouse/pull/54246) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Properly re-initialize ZooKeeper fault injection [#54251](https://github.com/ClickHouse/ClickHouse/pull/54251) ([Alexander Gololobov](https://github.com/davenger)).
* Update ci-slack-bot.py [#54253](https://github.com/ClickHouse/ClickHouse/pull/54253) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix clickhouse-test --no-drop-if-fail on reference mismatch [#54256](https://github.com/ClickHouse/ClickHouse/pull/54256) ([Azat Khuzhin](https://github.com/azat)).
* Improve slack-bot-ci lambda [#54258](https://github.com/ClickHouse/ClickHouse/pull/54258) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Update version_date.tsv and changelogs after v23.3.12.11-lts [#54259](https://github.com/ClickHouse/ClickHouse/pull/54259) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Minor change [#54261](https://github.com/ClickHouse/ClickHouse/pull/54261) ([flynn](https://github.com/ucasfl)).
* Add a note of where the lambda is deployed [#54268](https://github.com/ClickHouse/ClickHouse/pull/54268) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Query cache: Log caching of entries [#54270](https://github.com/ClickHouse/ClickHouse/pull/54270) ([Robert Schulze](https://github.com/rschu1ze)).
* Update version_date.tsv and changelogs after v23.8.2.7-lts [#54273](https://github.com/ClickHouse/ClickHouse/pull/54273) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Fix test `02783_parsedatetimebesteffort_syslog` [#54279](https://github.com/ClickHouse/ClickHouse/pull/54279) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix `test_keeper_disks` [#54291](https://github.com/ClickHouse/ClickHouse/pull/54291) ([Antonio Andelic](https://github.com/antonio2368)).
* Code improvement for reading from archives [#54293](https://github.com/ClickHouse/ClickHouse/pull/54293) ([Antonio Andelic](https://github.com/antonio2368)).
* Rollback testing part from [#42599](https://github.com/ClickHouse/ClickHouse/issues/42599) [#54301](https://github.com/ClickHouse/ClickHouse/pull/54301) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* CI: libFuzzer integration [#54310](https://github.com/ClickHouse/ClickHouse/pull/54310) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Update version_date.tsv and changelogs after v23.3.13.6-lts [#54313](https://github.com/ClickHouse/ClickHouse/pull/54313) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Add logs for parallel replica over distributed [#54315](https://github.com/ClickHouse/ClickHouse/pull/54315) ([Igor Nikonov](https://github.com/devcrafter)).
* Increase timeout for system.stack_trace in 01051_system_stack_trace [#54321](https://github.com/ClickHouse/ClickHouse/pull/54321) ([Azat Khuzhin](https://github.com/azat)).
* Fix replace_partition test [#54322](https://github.com/ClickHouse/ClickHouse/pull/54322) ([Pedro Riera](https://github.com/priera)).
* Fix segfault in system.zookeeper [#54326](https://github.com/ClickHouse/ClickHouse/pull/54326) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fixed flaky test `02841_parallel_replicas_summary` [#54331](https://github.com/ClickHouse/ClickHouse/pull/54331) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* Consolidate GCD codec tests (Follow up to [#53149](https://github.com/ClickHouse/ClickHouse/issues/53149)) [#54332](https://github.com/ClickHouse/ClickHouse/pull/54332) ([Robert Schulze](https://github.com/rschu1ze)).
* Fixed wrong dereference problem in Context::setTemporaryStorageInCache [#54333](https://github.com/ClickHouse/ClickHouse/pull/54333) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
* Used assert_cast instead of dynamic_cast in ExternalDataSourceCache [#54336](https://github.com/ClickHouse/ClickHouse/pull/54336) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
* Fix bad punctuation in Keeper's logs [#54338](https://github.com/ClickHouse/ClickHouse/pull/54338) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Improved protection from dereferencing of nullptr [#54339](https://github.com/ClickHouse/ClickHouse/pull/54339) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
* Fix filesystem cache test [#54343](https://github.com/ClickHouse/ClickHouse/pull/54343) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Parallel replicas: remove unused code [#54354](https://github.com/ClickHouse/ClickHouse/pull/54354) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix flaky test test_storage_azure_blob_storage/test.py::test_schema_iference_cache [#54367](https://github.com/ClickHouse/ClickHouse/pull/54367) ([Kruglov Pavel](https://github.com/Avogar)).
* Enable hedged requests integration tests with tsan, use max_distributed_connections=1 to fix possible flakiness [#54371](https://github.com/ClickHouse/ClickHouse/pull/54371) ([Kruglov Pavel](https://github.com/Avogar)).
* Use abiv2 when generating OpenSSL .s files for powerpc64le [#54375](https://github.com/ClickHouse/ClickHouse/pull/54375) ([Boris Kuschel](https://github.com/bkuschel)).
* Disable prefer_localhost_replica in test for parallel replicas [#54377](https://github.com/ClickHouse/ClickHouse/pull/54377) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* Fix incorrect formatting of CREATE query with PRIMARY KEY [#54403](https://github.com/ClickHouse/ClickHouse/pull/54403) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||||
|
* Fix failed assert in attach thread during startup retries [#54408](https://github.com/ClickHouse/ClickHouse/pull/54408) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Hashtable order fix on big endian platform [#54409](https://github.com/ClickHouse/ClickHouse/pull/54409) ([Suzy Wang](https://github.com/SuzyWangIBMer)).
|
||||||
|
* Small fine-tune for using ColumnNullable pointer [#54435](https://github.com/ClickHouse/ClickHouse/pull/54435) ([Alex Cheng](https://github.com/Alex-Cheng)).
|
||||||
|
* Update automated commit status comment [#54441](https://github.com/ClickHouse/ClickHouse/pull/54441) ([vdimir](https://github.com/vdimir)).
|
||||||
|
* Remove useless line [#54466](https://github.com/ClickHouse/ClickHouse/pull/54466) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Add a log message on replicated table drop [#54467](https://github.com/ClickHouse/ClickHouse/pull/54467) ([Dmitry Novik](https://github.com/novikd)).
|
||||||
|
* Cleanup: unnecessary SelectQueryInfo usage around distributed [#54468](https://github.com/ClickHouse/ClickHouse/pull/54468) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* Add `instance_type` column to CI Logs and the `checks` table [#54469](https://github.com/ClickHouse/ClickHouse/pull/54469) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Refactor IHints [#54481](https://github.com/ClickHouse/ClickHouse/pull/54481) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Fix strange message [#54489](https://github.com/ClickHouse/ClickHouse/pull/54489) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Bump re2 to latest main [#54492](https://github.com/ClickHouse/ClickHouse/pull/54492) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* S3 artifacts [#54504](https://github.com/ClickHouse/ClickHouse/pull/54504) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Flush logs for system.backup_log test. [#54507](https://github.com/ClickHouse/ClickHouse/pull/54507) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
* Fix use-after-free in `MergeTreePrefetchedReadPool` [#54512](https://github.com/ClickHouse/ClickHouse/pull/54512) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Disable parallel replicas on shards with not enough nodes [#54519](https://github.com/ClickHouse/ClickHouse/pull/54519) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* Parallel replicas: cleanup unused params [#54520](https://github.com/ClickHouse/ClickHouse/pull/54520) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* FunctionHelpers remove areTypesEqual function [#54546](https://github.com/ClickHouse/ClickHouse/pull/54546) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Add stateless test for clickhouse keeper-client --no-confirmation [#54547](https://github.com/ClickHouse/ClickHouse/pull/54547) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Increase default timeout in tests for keeper-client [#54551](https://github.com/ClickHouse/ClickHouse/pull/54551) ([pufit](https://github.com/pufit)).
|
||||||
|
* clang-format: Disable namespace indentation and omit {} in if/for/while [#54554](https://github.com/ClickHouse/ClickHouse/pull/54554) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* ngramDistance* queries fix for big endian platform [#54555](https://github.com/ClickHouse/ClickHouse/pull/54555) ([Suzy Wang](https://github.com/SuzyWangIBMer)).
|
||||||
|
* Fix AST fuzzer crash in MergeTreeIndex{FullText|Inverted} [#54563](https://github.com/ClickHouse/ClickHouse/pull/54563) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Remove output_format_markdown_escape_special_characters from settings changes history [#54585](https://github.com/ClickHouse/ClickHouse/pull/54585) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Add basic logic to find releasable commits [#54604](https://github.com/ClickHouse/ClickHouse/pull/54604) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
|
||||||
|
* Fix reading of virtual columns in reverse order [#54610](https://github.com/ClickHouse/ClickHouse/pull/54610) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Fix possible CANNOT_READ_ALL_DATA during ZooKeeper client finalization and add some tests [#54632](https://github.com/ClickHouse/ClickHouse/pull/54632) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Fix a bug in addData and subData functions [#54636](https://github.com/ClickHouse/ClickHouse/pull/54636) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||||
|
* Follow-up to [#54550](https://github.com/ClickHouse/ClickHouse/issues/54550) [#54641](https://github.com/ClickHouse/ClickHouse/pull/54641) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Remove broken lockless variant of re2 [#54642](https://github.com/ClickHouse/ClickHouse/pull/54642) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Bump abseil [#54646](https://github.com/ClickHouse/ClickHouse/pull/54646) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* limit the delay before next try in S3 [#54651](https://github.com/ClickHouse/ClickHouse/pull/54651) ([Sema Checherinda](https://github.com/CheSema)).
|
||||||
|
* Fix parser unit tests [#54670](https://github.com/ClickHouse/ClickHouse/pull/54670) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
|
||||||
|
* Fix: Log engine Mark file to read and write in little Endian for s390x [#54677](https://github.com/ClickHouse/ClickHouse/pull/54677) ([bhavnajindal](https://github.com/bhavnajindal)).
|
||||||
|
* Update WebObjectStorage.cpp [#54695](https://github.com/ClickHouse/ClickHouse/pull/54695) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* add cancelation point to s3 retries [#54697](https://github.com/ClickHouse/ClickHouse/pull/54697) ([Sema Checherinda](https://github.com/CheSema)).
|
||||||
|
* Revert default batch size for Keeper [#54745](https://github.com/ClickHouse/ClickHouse/pull/54745) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Enable `allow_experimental_undrop_table_query` [#54754](https://github.com/ClickHouse/ClickHouse/pull/54754) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Fix 02882_clickhouse_keeper_client_no_confirmation test [#54761](https://github.com/ClickHouse/ClickHouse/pull/54761) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Better exception message in checkDataPart [#54768](https://github.com/ClickHouse/ClickHouse/pull/54768) ([alesapin](https://github.com/alesapin)).
|
||||||
|
* Don't use default move assignment in TimerDescriptor [#54769](https://github.com/ClickHouse/ClickHouse/pull/54769) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Add retries to rests test_async_query_sending/test_async_connect [#54772](https://github.com/ClickHouse/ClickHouse/pull/54772) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* update comment [#54780](https://github.com/ClickHouse/ClickHouse/pull/54780) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Fix broken tests for clickhouse-diagnostics [#54790](https://github.com/ClickHouse/ClickHouse/pull/54790) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* refine error code of duplicated index in create query [#54791](https://github.com/ClickHouse/ClickHouse/pull/54791) ([Han Fei](https://github.com/hanfei1991)).
|
||||||
|
* Do not set PR status label [#54799](https://github.com/ClickHouse/ClickHouse/pull/54799) ([vdimir](https://github.com/vdimir)).
|
||||||
|
* Prevent parquet schema inference reading the first 1 MB of the file unnecessarily [#54808](https://github.com/ClickHouse/ClickHouse/pull/54808) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||||
|
* Prevent ParquetMetadata reading 40 MB from each file unnecessarily [#54809](https://github.com/ClickHouse/ClickHouse/pull/54809) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||||
|
* Use appropriate error code instead of LOGICAL_ERROR [#54810](https://github.com/ClickHouse/ClickHouse/pull/54810) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
|
* Adjusting `num_streams` by expected work in StorageS3 [#54815](https://github.com/ClickHouse/ClickHouse/pull/54815) ([pufit](https://github.com/pufit)).
|
||||||
|
* Fix test_backup_restore_on_cluster/test.py::test_stop_other_host_during_backup flakiness [#54816](https://github.com/ClickHouse/ClickHouse/pull/54816) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Remove config files sizes check [#54824](https://github.com/ClickHouse/ClickHouse/pull/54824) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* Set correct size for signal pipe buffer [#54836](https://github.com/ClickHouse/ClickHouse/pull/54836) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Refactor and split up vector search tests [#54839](https://github.com/ClickHouse/ClickHouse/pull/54839) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Add some logging to StorageRabbitMQ [#54842](https://github.com/ClickHouse/ClickHouse/pull/54842) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Update CHANGELOG.md [#54843](https://github.com/ClickHouse/ClickHouse/pull/54843) ([Ilya Yatsishin](https://github.com/qoega)).
|
||||||
|
* Refactor and simplify multi-directory globs [#54863](https://github.com/ClickHouse/ClickHouse/pull/54863) ([Andrey Zvonov](https://github.com/zvonand)).
|
||||||
|
* KeeperTCPHandler.cpp: Fix clang-17 build [#54874](https://github.com/ClickHouse/ClickHouse/pull/54874) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Decrease timeout for fast tests with a commit [#54878](https://github.com/ClickHouse/ClickHouse/pull/54878) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* More stable `02703_keeper_map_concurrent_create_drop` [#54879](https://github.com/ClickHouse/ClickHouse/pull/54879) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Fix division by zero in StorageS3 [#54904](https://github.com/ClickHouse/ClickHouse/pull/54904) ([pufit](https://github.com/pufit)).
|
||||||
|
* Set exception for promise in `CreatingSetsTransform` [#54920](https://github.com/ClickHouse/ClickHouse/pull/54920) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Fix an exception message in Pipe::addTransform [#54926](https://github.com/ClickHouse/ClickHouse/pull/54926) ([Alex Cheng](https://github.com/Alex-Cheng)).
|
||||||
|
* Fix data race during BackupsWorker::backup_log initialization [#54928](https://github.com/ClickHouse/ClickHouse/pull/54928) ([Victor Krasnov](https://github.com/sirvickr)).
|
||||||
|
* Provide support for BSON on BE [#54933](https://github.com/ClickHouse/ClickHouse/pull/54933) ([Austin Kothig](https://github.com/kothiga)).
|
||||||
|
* Set a minimum limit of `num_streams` in StorageS3 [#54936](https://github.com/ClickHouse/ClickHouse/pull/54936) ([pufit](https://github.com/pufit)).
|
||||||
|
* Ipv4 read big endian [#54938](https://github.com/ClickHouse/ClickHouse/pull/54938) ([Suzy Wang](https://github.com/SuzyWangIBMer)).
|
||||||
|
* Fix data race in SYSTEM STOP LISTEN [#54939](https://github.com/ClickHouse/ClickHouse/pull/54939) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||||
|
* Add desperate instrumentation for debugging deadlock in MultiplexedConnections [#54940](https://github.com/ClickHouse/ClickHouse/pull/54940) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||||
|
* Respect max_block_size while generating rows for system.stack_trace (will fix flakiness of the test) [#54946](https://github.com/ClickHouse/ClickHouse/pull/54946) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Remove test `01051_system_stack_trace` [#54951](https://github.com/ClickHouse/ClickHouse/pull/54951) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Add a test for compatibility [#54960](https://github.com/ClickHouse/ClickHouse/pull/54960) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Remove test `02151_hash_table_sizes_stats` [#54961](https://github.com/ClickHouse/ClickHouse/pull/54961) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Remove 02151_hash_table_sizes_stats_distributed (fixes broken CI) [#54969](https://github.com/ClickHouse/ClickHouse/pull/54969) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Use pregenerated gRPC protocol pb2 files to fix test flakyness. [#54976](https://github.com/ClickHouse/ClickHouse/pull/54976) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||||
|
* Delete a test [#54984](https://github.com/ClickHouse/ClickHouse/pull/54984) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Add assertion [#54985](https://github.com/ClickHouse/ClickHouse/pull/54985) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix test parallel replicas over distributed [#54987](https://github.com/ClickHouse/ClickHouse/pull/54987) ([Igor Nikonov](https://github.com/devcrafter)).
|
||||||
|
* Update README.md [#54990](https://github.com/ClickHouse/ClickHouse/pull/54990) ([Tyler Hannan](https://github.com/tylerhannan)).
|
||||||
|
* Re-enable clang-tidy checks disabled in the Clang 17 update [#54999](https://github.com/ClickHouse/ClickHouse/pull/54999) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Print more information about one logical error in MergeTreeDataWriter [#55001](https://github.com/ClickHouse/ClickHouse/pull/55001) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||||
|
* Add a test [#55003](https://github.com/ClickHouse/ClickHouse/pull/55003) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Lower log levels for `SSOCredentialsProvider` [#55012](https://github.com/ClickHouse/ClickHouse/pull/55012) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Set exception for promise in `CreatingSetsTransform` in more cases [#55013](https://github.com/ClickHouse/ClickHouse/pull/55013) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Setting compile_aggregate_expressions comment fix [#55020](https://github.com/ClickHouse/ClickHouse/pull/55020) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* Revert "Added field "is_deterministic" to system.functions" [#55022](https://github.com/ClickHouse/ClickHouse/pull/55022) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Get rid of the most of `os.path` stuff [#55028](https://github.com/ClickHouse/ClickHouse/pull/55028) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Fix pre-build scripts for old branches [#55032](https://github.com/ClickHouse/ClickHouse/pull/55032) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Review fix for [#54935](https://github.com/ClickHouse/ClickHouse/issues/54935) [#55042](https://github.com/ClickHouse/ClickHouse/pull/55042) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Update gtest_lru_file_cache.cpp [#55053](https://github.com/ClickHouse/ClickHouse/pull/55053) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix prebuild scripts one more time [#55059](https://github.com/ClickHouse/ClickHouse/pull/55059) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Use different names for variables inside build.sh [#55067](https://github.com/ClickHouse/ClickHouse/pull/55067) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Remove String Jaccard Index [#55080](https://github.com/ClickHouse/ClickHouse/pull/55080) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* I don't understand why backup log is not enabled by default [#55081](https://github.com/ClickHouse/ClickHouse/pull/55081) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Fix typo in packager when ccache is used [#55104](https://github.com/ClickHouse/ClickHouse/pull/55104) ([Ilya Yatsishin](https://github.com/qoega)).
|
||||||
|
* Reduce flakiness of 01455_opentelemetry_distributed [#55111](https://github.com/ClickHouse/ClickHouse/pull/55111) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||||
|
* Fix build [#55113](https://github.com/ClickHouse/ClickHouse/pull/55113) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
@ -11,14 +11,14 @@ This is intended for continuous integration checks that run on Linux servers. If
|
|||||||
|
|
||||||
The cross-build for macOS is based on the [Build instructions](../development/build.md), follow them first.
|
The cross-build for macOS is based on the [Build instructions](../development/build.md), follow them first.
|
||||||
|
|
||||||
## Install Clang-16
|
## Install Clang-17
|
||||||
|
|
||||||
Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup.
|
Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup.
|
||||||
For example the commands for Bionic are like:
|
For example the commands for Bionic are like:
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-16 main" >> /etc/apt/sources.list
|
sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-17 main" >> /etc/apt/sources.list
|
||||||
sudo apt-get install clang-16
|
sudo apt-get install clang-17
|
||||||
```
|
```
|
||||||
|
|
||||||
## Install Cross-Compilation Toolset {#install-cross-compilation-toolset}
|
## Install Cross-Compilation Toolset {#install-cross-compilation-toolset}
|
||||||
@ -55,7 +55,7 @@ curl -L 'https://github.com/phracker/MacOSX-SDKs/releases/download/10.15/MacOSX1
|
|||||||
cd ClickHouse
|
cd ClickHouse
|
||||||
mkdir build-darwin
|
mkdir build-darwin
|
||||||
cd build-darwin
|
cd build-darwin
|
||||||
CC=clang-16 CXX=clang++-16 cmake -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ar -DCMAKE_INSTALL_NAME_TOOL=${CCTOOLS}/bin/x86_64-apple-darwin-install_name_tool -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ranlib -DLINKER_NAME=${CCTOOLS}/bin/x86_64-apple-darwin-ld -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake ..
|
CC=clang-17 CXX=clang++-17 cmake -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ar -DCMAKE_INSTALL_NAME_TOOL=${CCTOOLS}/bin/x86_64-apple-darwin-install_name_tool -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ranlib -DLINKER_NAME=${CCTOOLS}/bin/x86_64-apple-darwin-ld -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake ..
|
||||||
ninja
|
ninja
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -57,7 +57,7 @@ sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
|
|||||||
|
|
||||||
For other Linux distributions - check the availability of LLVM's [prebuild packages](https://releases.llvm.org/download.html).
|
For other Linux distributions - check the availability of LLVM's [prebuild packages](https://releases.llvm.org/download.html).
|
||||||
|
|
||||||
As of April 2023, clang-16 or higher will work.
|
As of August 2023, clang-16 or higher will work.
|
||||||
GCC as a compiler is not supported.
|
GCC as a compiler is not supported.
|
||||||
To build with a specific Clang version:
|
To build with a specific Clang version:
|
||||||
|
|
||||||
@ -67,8 +67,8 @@ to see what version you have installed before setting this environment variable.
|
|||||||
:::
|
:::
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
export CC=clang-16
|
export CC=clang-17
|
||||||
export CXX=clang++-16
|
export CXX=clang++-17
|
||||||
```
|
```
|
||||||
|
|
||||||
### Checkout ClickHouse Sources {#checkout-clickhouse-sources}
|
### Checkout ClickHouse Sources {#checkout-clickhouse-sources}
|
||||||
@ -105,8 +105,8 @@ The build requires the following components:
|
|||||||
|
|
||||||
- Git (used to checkout the sources, not needed for the build)
|
- Git (used to checkout the sources, not needed for the build)
|
||||||
- CMake 3.20 or newer
|
- CMake 3.20 or newer
|
||||||
- Compiler: clang-16 or newer
|
- Compiler: clang-17 or newer
|
||||||
- Linker: lld-16 or newer
|
- Linker: lld-17 or newer
|
||||||
- Ninja
|
- Ninja
|
||||||
- Yasm
|
- Yasm
|
||||||
- Gawk
|
- Gawk
|
||||||
|
@ -102,7 +102,7 @@ Builds ClickHouse in various configurations for use in further steps. You have t
|
|||||||
|
|
||||||
### Report Details
|
### Report Details
|
||||||
|
|
||||||
- **Compiler**: `clang-16`, optionally with the name of a target platform
|
- **Compiler**: `clang-17`, optionally with the name of a target platform
|
||||||
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
|
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
|
||||||
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
|
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
|
||||||
- **Status**: `success` or `fail`
|
- **Status**: `success` or `fail`
|
||||||
|
@ -152,7 +152,7 @@ While inside the `build` directory, configure your build by running CMake. Befor
|
|||||||
export CC=clang CXX=clang++
|
export CC=clang CXX=clang++
|
||||||
cmake ..
|
cmake ..
|
||||||
|
|
||||||
If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-16 CXX=clang++-16`. The clang version will be in the script output.
|
If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-17 CXX=clang++-17`. The clang version will be in the script output.
|
||||||
|
|
||||||
The `CC` variable specifies the compiler for C (short for C Compiler), and `CXX` variable instructs which C++ compiler is to be used for building.
|
The `CC` variable specifies the compiler for C (short for C Compiler), and `CXX` variable instructs which C++ compiler is to be used for building.
|
||||||
|
|
||||||
@ -276,8 +276,6 @@ Most probably some of the builds will fail at first times. This is due to the fa
|
|||||||
|
|
||||||
## Browse ClickHouse Source Code {#browse-clickhouse-source-code}
|
## Browse ClickHouse Source Code {#browse-clickhouse-source-code}
|
||||||
|
|
||||||
You can use the **Woboq** online code browser available [here](https://clickhouse.com/codebrowser/ClickHouse/src/index.html). It provides code navigation, semantic highlighting, search and indexing. The code snapshot is updated daily.
|
|
||||||
|
|
||||||
You can use GitHub integrated code browser [here](https://github.dev/ClickHouse/ClickHouse).
|
You can use GitHub integrated code browser [here](https://github.dev/ClickHouse/ClickHouse).
|
||||||
|
|
||||||
Also, you can browse sources on [GitHub](https://github.com/ClickHouse/ClickHouse) as usual.
|
Also, you can browse sources on [GitHub](https://github.com/ClickHouse/ClickHouse) as usual.
|
||||||
|
@ -60,7 +60,7 @@ Before using cache, add it to `config.xml`
|
|||||||
- limit_size: Required. The maximum size(in bytes) of local cache files.
|
- limit_size: Required. The maximum size(in bytes) of local cache files.
|
||||||
- bytes_read_before_flush: Control bytes before flush to local filesystem when downloading file from remote filesystem. The default value is 1MB.
|
- bytes_read_before_flush: Control bytes before flush to local filesystem when downloading file from remote filesystem. The default value is 1MB.
|
||||||
|
|
||||||
When ClickHouse is started up with local cache for remote filesystem enabled, users can still choose not to use cache with `settings use_local_cache_for_remote_fs = 0` in their query. `use_local_cache_for_remote_fs` is `false` in default.
|
When ClickHouse is started up with local cache for remote filesystem enabled, users can still choose not to use cache with `settings use_local_cache_for_remote_storage = 0` in their query. `use_local_cache_for_remote_storage` is `1` by default.
|
||||||
|
|
||||||
### Query Hive Table with ORC Input Format
|
### Query Hive Table with ORC Input Format
|
||||||
|
|
||||||
|
@ -1,12 +1,19 @@
|
|||||||
---
|
---
|
||||||
slug: /en/engines/table-engines/integrations/s3queue
|
slug: /en/engines/table-engines/integrations/s3queue
|
||||||
sidebar_position: 7
|
sidebar_position: 181
|
||||||
sidebar_label: S3Queue
|
sidebar_label: S3Queue
|
||||||
---
|
---
|
||||||
|
|
||||||
# S3Queue Table Engine
|
# [experimental] S3Queue Table Engine
|
||||||
This engine provides integration with [Amazon S3](https://aws.amazon.com/s3/) ecosystem and allows streaming import. This engine is similar to the [Kafka](../../../engines/table-engines/integrations/kafka.md), [RabbitMQ](../../../engines/table-engines/integrations/rabbitmq.md) engines, but provides S3-specific features.
|
This engine provides integration with [Amazon S3](https://aws.amazon.com/s3/) ecosystem and allows streaming import. This engine is similar to the [Kafka](../../../engines/table-engines/integrations/kafka.md), [RabbitMQ](../../../engines/table-engines/integrations/rabbitmq.md) engines, but provides S3-specific features.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
This table engine is experimental. To use it, set `allow_experimental_s3queue` to 1 by using the `SET` command:
|
||||||
|
```sql
|
||||||
|
SET allow_experimental_s3queue=1
|
||||||
|
```
|
||||||
|
:::
|
||||||
|
|
||||||
## Create Table {#creating-a-table}
|
## Create Table {#creating-a-table}
|
||||||
|
|
||||||
``` sql
|
``` sql
|
||||||
|
@ -203,11 +203,16 @@ Parameter `NumTrees` is the number of trees which the algorithm creates (default
|
|||||||
more accurate search results but slower index creation / query times (approximately linearly) as well as larger index sizes.
|
more accurate search results but slower index creation / query times (approximately linearly) as well as larger index sizes.
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
Indexes over columns of type `Array` will generally work faster than indexes on `Tuple` columns. All arrays **must** have same length. Use
|
Indexes over columns of type `Array` will generally work faster than indexes on `Tuple` columns. All arrays must have same length. To avoid
|
||||||
[CONSTRAINT](/docs/en/sql-reference/statements/create/table.md#constraints) to avoid errors. For example, `CONSTRAINT constraint_name_1
|
errors, you can use a [CONSTRAINT](/docs/en/sql-reference/statements/create/table.md#constraints), for example, `CONSTRAINT
|
||||||
CHECK length(vectors) = 256`.
|
constraint_name_1 CHECK length(vectors) = 256`. Also, empty `Arrays` and unspecified `Array` values in INSERT statements (i.e. default
|
||||||
|
values) are not supported.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
The creation of Annoy indexes (whenever a new part is build, e.g. at the end of a merge) is a relatively slow process. You can increase
|
||||||
|
setting `max_threads_for_annoy_index_creation` (default: 4) which controls how many threads are used to create an Annoy index. Please be
|
||||||
|
careful with this setting, it is possible that multiple indexes are created in parallel in which case there can be overparallelization.
|
||||||
|
|
||||||
Setting `annoy_index_search_k_nodes` (default: `NumTrees * LIMIT`) determines how many tree nodes are inspected during SELECTs. Larger
|
Setting `annoy_index_search_k_nodes` (default: `NumTrees * LIMIT`) determines how many tree nodes are inspected during SELECTs. Larger
|
||||||
values mean more accurate results at the cost of longer query runtime:
|
values mean more accurate results at the cost of longer query runtime:
|
||||||
|
|
||||||
@ -223,6 +228,7 @@ SETTINGS annoy_index_search_k_nodes=100;
|
|||||||
The Annoy index currently does not work with per-table, non-default `index_granularity` settings (see
|
The Annoy index currently does not work with per-table, non-default `index_granularity` settings (see
|
||||||
[here](https://github.com/ClickHouse/ClickHouse/pull/51325#issuecomment-1605920475)). If necessary, the value must be changed in config.xml.
|
[here](https://github.com/ClickHouse/ClickHouse/pull/51325#issuecomment-1605920475)). If necessary, the value must be changed in config.xml.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
## USearch {#usearch}
|
## USearch {#usearch}
|
||||||
|
|
||||||
This type of ANN index is based on the [the USearch library](https://github.com/unum-cloud/usearch), which implements the [HNSW
|
This type of ANN index is based on the [the USearch library](https://github.com/unum-cloud/usearch), which implements the [HNSW
|
||||||
|
@ -100,8 +100,43 @@ SELECT * FROM mySecondReplacingMT FINAL;
|
|||||||
The row is deleted when `OPTIMIZE ... FINAL CLEANUP` or `OPTIMIZE ... FINAL` is used, or if the engine setting `clean_deleted_rows` has been set to `Always`.
|
The row is deleted when `OPTIMIZE ... FINAL CLEANUP` or `OPTIMIZE ... FINAL` is used, or if the engine setting `clean_deleted_rows` has been set to `Always`.
|
||||||
|
|
||||||
No matter the operation on the data, the version must be increased. If two inserted rows have the same version number, the last inserted row is the one kept.
|
No matter the operation on the data, the version must be increased. If two inserted rows have the same version number, the last inserted row is the one kept.
|
||||||
|
|
||||||
|
Always execute `OPTIMIZE ... FINAL CLEANUP` and `OPTIMIZE ... FINAL` to delete rows with `is_deleted=1` defined, especially when you wish to insert a new row with an older version. Otherwise, the new row with an older version will be replaced and not be persisted.
|
||||||
|
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```sql
|
||||||
|
-- with ver and is_deleted
|
||||||
|
CREATE OR REPLACE TABLE myThirdReplacingMT
|
||||||
|
(
|
||||||
|
`key` Int64,
|
||||||
|
`someCol` String,
|
||||||
|
`eventTime` DateTime,
|
||||||
|
`is_deleted` UInt8
|
||||||
|
)
|
||||||
|
ENGINE = ReplacingMergeTree(eventTime, is_deleted)
|
||||||
|
ORDER BY key;
|
||||||
|
|
||||||
|
INSERT INTO myThirdReplacingMT Values (1, 'first', '2020-01-01 01:01:01', 0);
|
||||||
|
INSERT INTO myThirdReplacingMT Values (1, 'first', '2020-01-01 01:01:01', 1);
|
||||||
|
|
||||||
|
select * from myThirdReplacingMT final;
|
||||||
|
|
||||||
|
0 rows in set. Elapsed: 0.003 sec.
|
||||||
|
|
||||||
|
-- delete rows with is_deleted
|
||||||
|
OPTIMIZE TABLE myThirdReplacingMT FINAL CLEANUP;
|
||||||
|
|
||||||
|
INSERT INTO myThirdReplacingMT Values (1, 'first', '2020-01-01 00:00:00', 0);
|
||||||
|
|
||||||
|
select * from myThirdReplacingMT final;
|
||||||
|
|
||||||
|
┌─key─┬─someCol─┬───────────eventTime─┬─is_deleted─┐
|
||||||
|
│ 1 │ first │ 2020-01-01 00:00:00 │ 0 │
|
||||||
|
└─────┴─────────┴─────────────────────┴────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
## Query clauses
|
## Query clauses
|
||||||
|
|
||||||
When creating a `ReplacingMergeTree` table the same [clauses](../../../engines/table-engines/mergetree-family/mergetree.md) are required, as when creating a `MergeTree` table.
|
When creating a `ReplacingMergeTree` table the same [clauses](../../../engines/table-engines/mergetree-family/mergetree.md) are required, as when creating a `MergeTree` table.
|
||||||
|
@ -1259,9 +1259,13 @@ SELECT * FROM json_each_row_nested
|
|||||||
|
|
||||||
- [input_format_import_nested_json](/docs/en/operations/settings/settings-formats.md/#input_format_import_nested_json) - map nested JSON data to nested tables (it works for JSONEachRow format). Default value - `false`.
|
- [input_format_import_nested_json](/docs/en/operations/settings/settings-formats.md/#input_format_import_nested_json) - map nested JSON data to nested tables (it works for JSONEachRow format). Default value - `false`.
|
||||||
- [input_format_json_read_bools_as_numbers](/docs/en/operations/settings/settings-formats.md/#input_format_json_read_bools_as_numbers) - allow to parse bools as numbers in JSON input formats. Default value - `true`.
|
- [input_format_json_read_bools_as_numbers](/docs/en/operations/settings/settings-formats.md/#input_format_json_read_bools_as_numbers) - allow to parse bools as numbers in JSON input formats. Default value - `true`.
|
||||||
- [input_format_json_read_numbers_as_strings](/docs/en/operations/settings/settings-formats.md/#input_format_json_read_numbers_as_strings) - allow to parse numbers as strings in JSON input formats. Default value - `false`.
|
- [input_format_json_read_numbers_as_strings](/docs/en/operations/settings/settings-formats.md/#input_format_json_read_numbers_as_strings) - allow to parse numbers as strings in JSON input formats. Default value - `true`.
|
||||||
- [input_format_json_read_objects_as_strings](/docs/en/operations/settings/settings-formats.md/#input_format_json_read_objects_as_strings) - allow to parse JSON objects as strings in JSON input formats. Default value - `false`.
|
- [input_format_json_read_arrays_as_strings](/docs/en/operations/settings/settings-formats.md/#input_format_json_read_arrays_as_strings) - allow to parse JSON arrays as strings in JSON input formats. Default value - `true`.
|
||||||
|
- [input_format_json_read_objects_as_strings](/docs/en/operations/settings/settings-formats.md/#input_format_json_read_objects_as_strings) - allow to parse JSON objects as strings in JSON input formats. Default value - `true`.
|
||||||
- [input_format_json_named_tuples_as_objects](/docs/en/operations/settings/settings-formats.md/#input_format_json_named_tuples_as_objects) - parse named tuple columns as JSON objects. Default value - `true`.
|
- [input_format_json_named_tuples_as_objects](/docs/en/operations/settings/settings-formats.md/#input_format_json_named_tuples_as_objects) - parse named tuple columns as JSON objects. Default value - `true`.
|
||||||
|
- [input_format_json_try_infer_numbers_from_strings](/docs/en/operations/settings/settings-formats.md/#input_format_json_try_infer_numbers_from_strings) - Try to infer numbers from string fields while schema inference. Default value - `false`.
|
||||||
|
- [input_format_json_try_infer_named_tuples_from_objects](/docs/en/operations/settings/settings-formats.md/#input_format_json_try_infer_named_tuples_from_objects) - try to infer named tuple from JSON objects during schema inference. Default value - `true`.
|
||||||
|
- [input_format_json_infer_incomplete_types_as_strings](/docs/en/operations/settings/settings-formats.md/#input_format_json_infer_incomplete_types_as_strings) - use type String for keys that contains only Nulls or empty objects/arrays during schema inference in JSON input formats. Default value - `true`.
|
||||||
- [input_format_json_defaults_for_missing_elements_in_named_tuple](/docs/en/operations/settings/settings-formats.md/#input_format_json_defaults_for_missing_elements_in_named_tuple) - insert default values for missing elements in JSON object while parsing named tuple. Default value - `true`.
|
- [input_format_json_defaults_for_missing_elements_in_named_tuple](/docs/en/operations/settings/settings-formats.md/#input_format_json_defaults_for_missing_elements_in_named_tuple) - insert default values for missing elements in JSON object while parsing named tuple. Default value - `true`.
|
||||||
- [input_format_json_ignore_unknown_keys_in_named_tuple](/docs/en/operations/settings/settings-formats.md/#input_format_json_ignore_unknown_keys_in_named_tuple) - Ignore unknown keys in json object for named tuples. Default value - `false`.
|
- [input_format_json_ignore_unknown_keys_in_named_tuple](/docs/en/operations/settings/settings-formats.md/#input_format_json_ignore_unknown_keys_in_named_tuple) - Ignore unknown keys in json object for named tuples. Default value - `false`.
|
||||||
- [input_format_json_compact_allow_variable_number_of_columns](/docs/en/operations/settings/settings-formats.md/#input_format_json_compact_allow_variable_number_of_columns) - allow variable number of columns in JSONCompact/JSONCompactEachRow format, ignore extra columns and use default values on missing columns. Default value - `false`.
|
- [input_format_json_compact_allow_variable_number_of_columns](/docs/en/operations/settings/settings-formats.md/#input_format_json_compact_allow_variable_number_of_columns) - allow variable number of columns in JSONCompact/JSONCompactEachRow format, ignore extra columns and use default values on missing columns. Default value - `false`.
|
||||||
|
@ -58,7 +58,7 @@ Connection: Close
|
|||||||
Content-Type: text/tab-separated-values; charset=UTF-8
|
Content-Type: text/tab-separated-values; charset=UTF-8
|
||||||
X-ClickHouse-Server-Display-Name: clickhouse.ru-central1.internal
|
X-ClickHouse-Server-Display-Name: clickhouse.ru-central1.internal
|
||||||
X-ClickHouse-Query-Id: 5abe861c-239c-467f-b955-8a201abb8b7f
|
X-ClickHouse-Query-Id: 5abe861c-239c-467f-b955-8a201abb8b7f
|
||||||
X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334,"peak_memory_usage":"0"}
|
X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"}
|
||||||
|
|
||||||
1
|
1
|
||||||
```
|
```
|
||||||
@ -288,9 +288,9 @@ Similarly, you can use ClickHouse sessions in the HTTP protocol. To do this, you
|
|||||||
You can receive information about the progress of a query in `X-ClickHouse-Progress` response headers. To do this, enable [send_progress_in_http_headers](../operations/settings/settings.md#settings-send_progress_in_http_headers). Example of the header sequence:
|
You can receive information about the progress of a query in `X-ClickHouse-Progress` response headers. To do this, enable [send_progress_in_http_headers](../operations/settings/settings.md#settings-send_progress_in_http_headers). Example of the header sequence:
|
||||||
|
|
||||||
``` text
|
``` text
|
||||||
X-ClickHouse-Progress: {"read_rows":"2752512","read_bytes":"240570816","total_rows_to_read":"8880128","elapsed_ns":"662334","peak_memory_usage":"4371480"}
|
X-ClickHouse-Progress: {"read_rows":"2752512","read_bytes":"240570816","total_rows_to_read":"8880128","elapsed_ns":"662334"}
|
||||||
X-ClickHouse-Progress: {"read_rows":"5439488","read_bytes":"482285394","total_rows_to_read":"8880128","elapsed_ns":"992334","peak_memory_usage":"13621616"}
|
X-ClickHouse-Progress: {"read_rows":"5439488","read_bytes":"482285394","total_rows_to_read":"8880128","elapsed_ns":"992334"}
|
||||||
X-ClickHouse-Progress: {"read_rows":"8783786","read_bytes":"819092887","total_rows_to_read":"8880128","elapsed_ns":"1232334","peak_memory_usage":"23155600"}
|
X-ClickHouse-Progress: {"read_rows":"8783786","read_bytes":"819092887","total_rows_to_read":"8880128","elapsed_ns":"1232334"}
|
||||||
```
|
```
|
||||||
|
|
||||||
Possible header fields:
|
Possible header fields:
|
||||||
@ -439,7 +439,7 @@ $ curl -v 'http://localhost:8123/predefined_query'
|
|||||||
< X-ClickHouse-Format: Template
|
< X-ClickHouse-Format: Template
|
||||||
< X-ClickHouse-Timezone: Asia/Shanghai
|
< X-ClickHouse-Timezone: Asia/Shanghai
|
||||||
< Keep-Alive: timeout=3
|
< Keep-Alive: timeout=3
|
||||||
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334","peak_memory_usage":"0"}
|
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"}
|
||||||
<
|
<
|
||||||
# HELP "Query" "Number of executing queries"
|
# HELP "Query" "Number of executing queries"
|
||||||
# TYPE "Query" counter
|
# TYPE "Query" counter
|
||||||
@ -604,7 +604,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/hi'
|
|||||||
< Content-Type: text/html; charset=UTF-8
|
< Content-Type: text/html; charset=UTF-8
|
||||||
< Transfer-Encoding: chunked
|
< Transfer-Encoding: chunked
|
||||||
< Keep-Alive: timeout=3
|
< Keep-Alive: timeout=3
|
||||||
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334","peak_memory_usage":"0"}
|
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"}
|
||||||
<
|
<
|
||||||
* Connection #0 to host localhost left intact
|
* Connection #0 to host localhost left intact
|
||||||
Say Hi!%
|
Say Hi!%
|
||||||
@ -644,7 +644,7 @@ $ curl -v -H 'XXX:xxx' 'http://localhost:8123/get_config_static_handler'
|
|||||||
< Content-Type: text/plain; charset=UTF-8
|
< Content-Type: text/plain; charset=UTF-8
|
||||||
< Transfer-Encoding: chunked
|
< Transfer-Encoding: chunked
|
||||||
< Keep-Alive: timeout=3
|
< Keep-Alive: timeout=3
|
||||||
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334","peak_memory_usage":"0"}
|
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"}
|
||||||
<
|
<
|
||||||
* Connection #0 to host localhost left intact
|
* Connection #0 to host localhost left intact
|
||||||
<html ng-app="SMI2"><head><base href="http://ui.tabix.io/"></head><body><div ui-view="" class="content-ui"></div><script src="http://loader.tabix.io/master.js"></script></body></html>%
|
<html ng-app="SMI2"><head><base href="http://ui.tabix.io/"></head><body><div ui-view="" class="content-ui"></div><script src="http://loader.tabix.io/master.js"></script></body></html>%
|
||||||
@ -696,7 +696,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/get_absolute_path_static_handler'
|
|||||||
< Content-Type: text/html; charset=UTF-8
|
< Content-Type: text/html; charset=UTF-8
|
||||||
< Transfer-Encoding: chunked
|
< Transfer-Encoding: chunked
|
||||||
< Keep-Alive: timeout=3
|
< Keep-Alive: timeout=3
|
||||||
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334","peak_memory_usage":"0"}
|
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"}
|
||||||
<
|
<
|
||||||
<html><body>Absolute Path File</body></html>
|
<html><body>Absolute Path File</body></html>
|
||||||
* Connection #0 to host localhost left intact
|
* Connection #0 to host localhost left intact
|
||||||
@ -715,8 +715,88 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/get_relative_path_static_handler'
|
|||||||
< Content-Type: text/html; charset=UTF-8
|
< Content-Type: text/html; charset=UTF-8
|
||||||
< Transfer-Encoding: chunked
|
< Transfer-Encoding: chunked
|
||||||
< Keep-Alive: timeout=3
|
< Keep-Alive: timeout=3
|
||||||
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334","peak_memory_usage":"0"}
|
< X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"}
|
||||||
<
|
<
|
||||||
<html><body>Relative Path File</body></html>
|
<html><body>Relative Path File</body></html>
|
||||||
* Connection #0 to host localhost left intact
|
* Connection #0 to host localhost left intact
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Valid JSON/XML response on exception during HTTP streaming {valid-output-on-exception-http-streaming}
|
||||||
|
|
||||||
|
While query execution over HTTP an exception can happen when part of the data has already been sent. Usually an exception is sent to the client in plain text
|
||||||
|
even if some specific data format was used to output data and the output may become invalid in terms of specified data format.
|
||||||
|
To prevent it, you can use setting `http_write_exception_in_output_format` (enabled by default) that will tell ClickHouse to write an exception in specified format (currently supported for XML and JSON* formats).
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ curl 'http://localhost:8123/?query=SELECT+number,+throwIf(number>3)+from+system.numbers+format+JSON+settings+max_block_size=1&http_write_exception_in_output_format=1'
|
||||||
|
{
|
||||||
|
"meta":
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"name": "number",
|
||||||
|
"type": "UInt64"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "throwIf(greater(number, 2))",
|
||||||
|
"type": "UInt8"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"data":
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"number": "0",
|
||||||
|
"throwIf(greater(number, 2))": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"number": "1",
|
||||||
|
"throwIf(greater(number, 2))": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"number": "2",
|
||||||
|
"throwIf(greater(number, 2))": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
|
||||||
|
"rows": 3,
|
||||||
|
|
||||||
|
"exception": "Code: 395. DB::Exception: Value passed to 'throwIf' function is non-zero: while executing 'FUNCTION throwIf(greater(number, 2) :: 2) -> throwIf(greater(number, 2)) UInt8 : 1'. (FUNCTION_THROW_IF_VALUE_IS_NON_ZERO) (version 23.8.1.1)"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ curl 'http://localhost:8123/?query=SELECT+number,+throwIf(number>2)+from+system.numbers+format+XML+settings+max_block_size=1&http_write_exception_in_output_format=1'
|
||||||
|
<?xml version='1.0' encoding='UTF-8' ?>
|
||||||
|
<result>
|
||||||
|
<meta>
|
||||||
|
<columns>
|
||||||
|
<column>
|
||||||
|
<name>number</name>
|
||||||
|
<type>UInt64</type>
|
||||||
|
</column>
|
||||||
|
<column>
|
||||||
|
<name>throwIf(greater(number, 2))</name>
|
||||||
|
<type>UInt8</type>
|
||||||
|
</column>
|
||||||
|
</columns>
|
||||||
|
</meta>
|
||||||
|
<data>
|
||||||
|
<row>
|
||||||
|
<number>0</number>
|
||||||
|
<field>0</field>
|
||||||
|
</row>
|
||||||
|
<row>
|
||||||
|
<number>1</number>
|
||||||
|
<field>0</field>
|
||||||
|
</row>
|
||||||
|
<row>
|
||||||
|
<number>2</number>
|
||||||
|
<field>0</field>
|
||||||
|
</row>
|
||||||
|
</data>
|
||||||
|
<rows>3</rows>
|
||||||
|
<exception>Code: 395. DB::Exception: Value passed to 'throwIf' function is non-zero: while executing 'FUNCTION throwIf(greater(number, 2) :: 2) -> throwIf(greater(number, 2)) UInt8 : 1'. (FUNCTION_THROW_IF_VALUE_IS_NON_ZERO) (version 23.8.1.1)</exception>
|
||||||
|
</result>
|
||||||
|
```
|
||||||
|
@ -389,9 +389,25 @@ DESC format(JSONEachRow, '{"arr" : [null, 42, null]}')
|
|||||||
└──────┴────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
└──────┴────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
Tuples:
|
Named tuples:
|
||||||
|
|
||||||
In JSON formats we treat Arrays with elements of different types as Tuples.
|
When setting `input_format_json_try_infer_named_tuples_from_objects` is enabled, during schema inference ClickHouse will try to infer named Tuple from JSON objects.
|
||||||
|
The resulting named Tuple will contain all elements from all corresponding JSON objects from sample data.
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SET input_format_json_try_infer_named_tuples_from_objects = 1;
|
||||||
|
DESC format(JSONEachRow, '{"obj" : {"a" : 42, "b" : "Hello"}}, {"obj" : {"a" : 43, "c" : [1, 2, 3]}}, {"obj" : {"d" : {"e" : 42}}}')
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─name─┬─type───────────────────────────────────────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
|
||||||
|
│ obj │ Tuple(a Nullable(Int64), b Nullable(String), c Array(Nullable(Int64)), d Tuple(e Nullable(Int64))) │ │ │ │ │ │
|
||||||
|
└──────┴────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Unnamed Tuples:
|
||||||
|
|
||||||
|
In JSON formats we treat Arrays with elements of different types as Unnamed Tuples.
|
||||||
```sql
|
```sql
|
||||||
DESC format(JSONEachRow, '{"tuple" : [1, "Hello, World!", [1, 2, 3]]}')
|
DESC format(JSONEachRow, '{"tuple" : [1, "Hello, World!", [1, 2, 3]]}')
|
||||||
```
|
```
|
||||||
@ -418,7 +434,10 @@ DESC format(JSONEachRow, $$
|
|||||||
Maps:
|
Maps:
|
||||||
|
|
||||||
In JSON we can read objects with values of the same type as Map type.
|
In JSON we can read objects with values of the same type as Map type.
|
||||||
|
Note: it will work only when settings `input_format_json_read_objects_as_strings` and `input_format_json_try_infer_named_tuples_from_objects` are disabled.
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
|
SET input_format_json_read_objects_as_strings = 0, input_format_json_try_infer_named_tuples_from_objects = 0;
|
||||||
DESC format(JSONEachRow, '{"map" : {"key1" : 42, "key2" : 24, "key3" : 4}}')
|
DESC format(JSONEachRow, '{"map" : {"key1" : 42, "key2" : 24, "key3" : 4}}')
|
||||||
```
|
```
|
||||||
```response
|
```response
|
||||||
@ -448,14 +467,22 @@ Nested complex types:
|
|||||||
DESC format(JSONEachRow, '{"value" : [[[42, 24], []], {"key1" : 42, "key2" : 24}]}')
|
DESC format(JSONEachRow, '{"value" : [[[42, 24], []], {"key1" : 42, "key2" : 24}]}')
|
||||||
```
|
```
|
||||||
```response
|
```response
|
||||||
┌─name──┬─type───────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
|
┌─name──┬─type─────────────────────────────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
|
||||||
│ value │ Tuple(Array(Array(Nullable(Int64))), Map(String, Nullable(Int64))) │ │ │ │ │ │
|
│ value │ Tuple(Array(Array(Nullable(String))), Tuple(key1 Nullable(Int64), key2 Nullable(Int64))) │ │ │ │ │ │
|
||||||
└───────┴────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
└───────┴──────────────────────────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
If ClickHouse cannot determine the type, because the data contains only nulls, an exception will be thrown:
|
If ClickHouse cannot determine the type for some key, because the data contains only nulls/empty objects/empty arrays, type `String` will be used if setting `input_format_json_infer_incomplete_types_as_strings` is enabled or an exception will be thrown otherwise:
|
||||||
```sql
|
```sql
|
||||||
DESC format(JSONEachRow, '{"arr" : [null, null]}')
|
DESC format(JSONEachRow, '{"arr" : [null, null]}') SETTINGS input_format_json_infer_incomplete_types_as_strings = 1;
|
||||||
|
```
|
||||||
|
```response
|
||||||
|
┌─name─┬─type────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
|
||||||
|
│ arr │ Array(Nullable(String)) │ │ │ │ │ │
|
||||||
|
└──────┴─────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
||||||
|
```
|
||||||
|
```sql
|
||||||
|
DESC format(JSONEachRow, '{"arr" : [null, null]}') SETTINGS input_format_json_infer_incomplete_types_as_strings = 0;
|
||||||
```
|
```
|
||||||
```response
|
```response
|
||||||
Code: 652. DB::Exception: Received from localhost:9000. DB::Exception:
|
Code: 652. DB::Exception: Received from localhost:9000. DB::Exception:
|
||||||
@@ -466,31 +493,11 @@ most likely this column contains only Nulls or empty Arrays/Maps.

#### JSON settings {#json-settings}

##### input_format_json_try_infer_numbers_from_strings

Enabling this setting allows inferring numbers from string values.

This setting is disabled by default.

**Example:**
@@ -507,11 +514,69 @@ DESC format(JSONEachRow, $$
└───────┴─────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
```

##### input_format_json_try_infer_named_tuples_from_objects

Enabling this setting allows inferring named Tuples from JSON objects. The resulting named Tuple will contain all elements from all corresponding JSON objects from sample data.

It can be useful when JSON data is not sparse so the sample of data will contain all possible object keys.

This setting is enabled by default.

**Example**

```sql
SET input_format_json_try_infer_named_tuples_from_objects = 1;
DESC format(JSONEachRow, '{"obj" : {"a" : 42, "b" : "Hello"}}, {"obj" : {"a" : 43, "c" : [1, 2, 3]}}, {"obj" : {"d" : {"e" : 42}}}')
```

Result:

```
┌─name─┬─type───────────────────────────────────────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
│ obj │ Tuple(a Nullable(Int64), b Nullable(String), c Array(Nullable(Int64)), d Tuple(e Nullable(Int64))) │ │ │ │ │ │
└──────┴────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
```

```sql
SET input_format_json_try_infer_named_tuples_from_objects = 1;
DESC format(JSONEachRow, '{"array" : [{"a" : 42, "b" : "Hello"}, {}, {"c" : [1,2,3]}, {"d" : "2020-01-01"}]}')
```

Result:

```
┌─name──┬─type────────────────────────────────────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
│ array │ Array(Tuple(a Nullable(Int64), b Nullable(String), c Array(Nullable(Int64)), d Nullable(Date))) │ │ │ │ │ │
└───────┴─────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
```

##### input_format_json_read_objects_as_strings

Enabling this setting allows reading nested JSON objects as strings.
This setting can be used to read nested JSON objects without using JSON object type.

This setting is enabled by default.

Note: enabling this setting will take effect only if setting `input_format_json_try_infer_named_tuples_from_objects` is disabled.

```sql
SET input_format_json_read_objects_as_strings = 1, input_format_json_try_infer_named_tuples_from_objects = 0;
DESC format(JSONEachRow, $$
{"obj" : {"key1" : 42, "key2" : [1,2,3,4]}}
{"obj" : {"key3" : {"nested_key" : 1}}}
$$)
```
```response
┌─name─┬─type─────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
│ obj │ Nullable(String) │ │ │ │ │ │
└──────┴──────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
```

##### input_format_json_read_numbers_as_strings

Enabling this setting allows reading numeric values as strings.

This setting is enabled by default.

**Example**
@@ -549,6 +614,49 @@ DESC format(JSONEachRow, $$
└───────┴─────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
```

##### input_format_json_read_arrays_as_strings

Enabling this setting allows reading JSON array values as strings.

This setting is enabled by default.

**Example**

```sql
SET input_format_json_read_arrays_as_strings = 1;
SELECT arr, toTypeName(arr), JSONExtractArrayRaw(arr)[3] from format(JSONEachRow, 'arr String', '{"arr" : [1, "Hello", [1,2,3]]}');
```
```response
┌─arr───────────────────┬─toTypeName(arr)─┬─arrayElement(JSONExtractArrayRaw(arr), 3)─┐
│ [1, "Hello", [1,2,3]] │ String │ [1,2,3] │
└───────────────────────┴─────────────────┴───────────────────────────────────────────┘
```

##### input_format_json_infer_incomplete_types_as_strings

Enabling this setting allows using String type for JSON keys that contain only `Null`/`{}`/`[]` in the data sample during schema inference.
In JSON formats any value can be read as String if all corresponding settings are enabled (they are all enabled by default), and we can avoid errors like `Cannot determine type for column 'column_name' by first 25000 rows of data, most likely this column contains only Nulls or empty Arrays/Maps` during schema inference by using String type for keys with unknown types.

Example:

```sql
SET input_format_json_infer_incomplete_types_as_strings = 1, input_format_json_try_infer_named_tuples_from_objects = 1;
DESCRIBE format(JSONEachRow, '{"obj" : {"a" : [1,2,3], "b" : "hello", "c" : null, "d" : {}, "e" : []}}');
SELECT * FROM format(JSONEachRow, '{"obj" : {"a" : [1,2,3], "b" : "hello", "c" : null, "d" : {}, "e" : []}}');
```

Result:
```
┌─name─┬─type───────────────────────────────────────────────────────────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
│ obj │ Tuple(a Array(Nullable(Int64)), b Nullable(String), c Nullable(String), d Nullable(String), e Array(Nullable(String))) │ │ │ │ │ │
└──────┴────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘

┌─obj────────────────────────────┐
│ ([1,2,3],'hello',NULL,'{}',[]) │
└────────────────────────────────┘
```

### CSV {#csv}

In CSV format ClickHouse extracts column values from the row according to delimiters. ClickHouse expects all types except numbers and strings to be enclosed in double quotes. If the value is in double quotes, ClickHouse tries to parse

docs/en/interfaces/third-party/client-libraries.md
@@ -2,6 +2,7 @@
slug: /en/interfaces/third-party/client-libraries
sidebar_position: 26
sidebar_label: Client Libraries
description: Third-party client libraries
---

# Client Libraries from Third-party Developers

@@ -10,68 +11,68 @@ sidebar_label: Client Libraries
ClickHouse Inc does **not** maintain the libraries listed below and hasn’t done any extensive testing to ensure their quality.
:::

### Python
- [infi.clickhouse_orm](https://github.com/Infinidat/infi.clickhouse_orm)
- [clickhouse-driver](https://github.com/mymarilyn/clickhouse-driver)
- [clickhouse-client](https://github.com/yurial/clickhouse-client)
- [aiochclient](https://github.com/maximdanilchenko/aiochclient)
- [asynch](https://github.com/long2ice/asynch)

### PHP
- [smi2/phpclickhouse](https://packagist.org/packages/smi2/phpClickHouse)
- [8bitov/clickhouse-php-client](https://packagist.org/packages/8bitov/clickhouse-php-client)
- [bozerkins/clickhouse-client](https://packagist.org/packages/bozerkins/clickhouse-client)
- [simpod/clickhouse-client](https://packagist.org/packages/simpod/clickhouse-client)
- [seva-code/php-click-house-client](https://packagist.org/packages/seva-code/php-click-house-client)
- [SeasClick C++ client](https://github.com/SeasX/SeasClick)
- [one-ck](https://github.com/lizhichao/one-ck)
- [glushkovds/phpclickhouse-laravel](https://packagist.org/packages/glushkovds/phpclickhouse-laravel)
- [kolya7k ClickHouse PHP extension](https://github.com//kolya7k/clickhouse-php)
- [hyvor/clickhouse-php](https://github.com/hyvor/clickhouse-php)

### Go
- [clickhouse](https://github.com/kshvakov/clickhouse/)
- [go-clickhouse](https://github.com/roistat/go-clickhouse)
- [chconn](https://github.com/vahid-sohrabloo/chconn)
- [mailrugo-clickhouse](https://github.com/mailru/go-clickhouse)
- [golang-clickhouse](https://github.com/leprosus/golang-clickhouse)
- [uptrace/go-clickhouse](https://clickhouse.uptrace.dev/)

### Swift
- [ClickHouseNIO](https://github.com/patrick-zippenfenig/ClickHouseNIO)
- [ClickHouseVapor ORM](https://github.com/patrick-zippenfenig/ClickHouseVapor)

### NodeJs
- [clickhouse (NodeJs)](https://github.com/TimonKK/clickhouse)
- [node-clickhouse](https://github.com/apla/node-clickhouse)
- [nestjs-clickhouse](https://github.com/depyronick/nestjs-clickhouse)
- [clickhouse-client](https://github.com/depyronick/clickhouse-client)
- [node-clickhouse-orm](https://github.com/zimv/node-clickhouse-orm)

### Perl
- [perl-DBD-ClickHouse](https://github.com/elcamlost/perl-DBD-ClickHouse)
- [HTTP-ClickHouse](https://metacpan.org/release/HTTP-ClickHouse)
- [AnyEvent-ClickHouse](https://metacpan.org/release/AnyEvent-ClickHouse)

### Ruby
- [ClickHouse (Ruby)](https://github.com/shlima/click_house)
- [clickhouse-activerecord](https://github.com/PNixx/clickhouse-activerecord)

### Rust
- [clickhouse.rs](https://github.com/loyd/clickhouse.rs)
- [clickhouse-rs](https://github.com/suharev7/clickhouse-rs)
- [Klickhouse](https://github.com/Protryon/klickhouse)

### R
- [RClickHouse](https://github.com/IMSMWU/RClickHouse)

### Java
- [clickhouse-client-java](https://github.com/VirtusAI/clickhouse-client-java)
- [clickhouse-client](https://github.com/Ecwid/clickhouse-client)

### Scala
- [clickhouse-scala-client](https://github.com/crobox/clickhouse-scala-client)

### Kotlin
- [AORM](https://github.com/TanVD/AORM)

### C#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

### Elixir
- [clickhousex](https://github.com/appodeal/clickhousex/)
- [pillar](https://github.com/sofakingworld/pillar)

### Nim
- [nim-clickhouse](https://github.com/leonardoce/nim-clickhouse)

### Haskell
- [hdbc-clickhouse](https://github.com/zaneli/hdbc-clickhouse)
@@ -406,7 +406,7 @@ RESTORE TABLE data AS data_restored FROM Disk('s3_plain', 'cloud_backup');
:::note
But keep in mind that:
- This disk should not be used for `MergeTree` itself, only for `BACKUP`/`RESTORE`
- If your tables are backed by S3 storage, it doesn't use `CopyObject` calls to copy parts to the destination bucket; instead, it downloads and uploads them, which is very inefficient. Prefer to use `BACKUP ... TO S3(<endpoint>)` syntax for this use-case.
:::

## Alternatives
@@ -69,7 +69,7 @@ may return cached results then.
The query cache can be cleared using statement `SYSTEM DROP QUERY CACHE`. The content of the query cache is displayed in system table
`system.query_cache`. The number of query cache hits and misses since database start are shown as events "QueryCacheHits" and
"QueryCacheMisses" in system table [system.events](system-tables/events.md). Both counters are only updated for `SELECT` queries which run
with setting `use_query_cache = true`, other queries do not affect "QueryCacheMisses". Field `query_cache_usage` in system table
[system.query_log](system-tables/query_log.md) shows for each executed query whether the query result was written into or read from the
query cache. Asynchronous metrics "QueryCacheEntries" and "QueryCacheBytes" in system table
[system.asynchronous_metrics](system-tables/asynchronous_metrics.md) show how many entries / bytes the query cache currently contains.
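For illustration, a minimal sketch of checking cache usage for a single query (the table name `my_table` is hypothetical):

```sql
-- Run a query through the query cache, then look up how the cache was used for it.
SELECT count() FROM my_table SETTINGS use_query_cache = true;

SELECT query, query_cache_usage
FROM system.query_log
WHERE type = 'QueryFinish'
  AND query LIKE '%my_table%'
ORDER BY event_time DESC
LIMIT 1;
```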
@@ -502,7 +502,7 @@ Possible values:
Default value: 480.

After merging several parts into a new part, ClickHouse marks the original parts as inactive and deletes them only after `old_parts_lifetime` seconds.
Inactive parts are removed if they are not used by current queries, i.e. if the `refcount` of the part is 1.

`fsync` is not called for new parts, so for some time new parts exist only in the server's RAM (OS cache). If the server is rebooted spontaneously, new parts can be lost or damaged.
To protect data, inactive parts are not deleted immediately.
@@ -555,7 +555,7 @@ Merge reads rows from parts in blocks of `merge_max_block_size` rows, then merge

## number_of_free_entries_in_pool_to_lower_max_size_of_merge {#number-of-free-entries-in-pool-to-lower-max-size-of-merge}

When there are fewer than the specified number of free entries in the pool (or replicated queue), start to lower the maximum size of merges to process (or to put in the queue).
This allows small merges to be processed without filling the pool with long-running merges.

Possible values:
@@ -566,7 +566,7 @@ Default value: 8

## number_of_free_entries_in_pool_to_execute_mutation {#number-of-free-entries-in-pool-to-execute-mutation}

When there are fewer than the specified number of free entries in the pool, do not execute part mutations.
This leaves free threads for regular merges and avoids "Too many parts" errors.

Possible values:
@@ -746,14 +746,14 @@ Default value: `0` (limit never applied).

Minimal ratio of the number of _default_ values to the number of _all_ values in a column. Setting this value causes the column to be stored using sparse serialization.

If a column is sparse (contains mostly zeros), ClickHouse can encode it in a sparse format and automatically optimize calculations - the data does not require full decompression during queries. To enable this sparse serialization, define the `ratio_of_defaults_for_sparse_serialization` setting to be less than 1.0. If the value is greater than or equal to 1.0, then the columns will always be written using the normal full serialization.

Possible values:

- Float between 0 and 1 to enable sparse serialization
- 1.0 (or greater) if you do not want to use sparse serialization

Default value: `0.9375`

**Example**
@@ -845,6 +845,13 @@ You can see which parts of `s` were stored using the sparse serialization:
└────────┴────────────────────┘
```

## replace_long_file_name_to_hash {#replace_long_file_name_to_hash}

If the file name for a column is too long (more than `max_file_name_length` bytes), replace it with its SipHash128 hash. Default value: `false`.

## max_file_name_length {#max_file_name_length}

The maximal length of the file name to keep it as is without hashing. Takes effect only if setting `replace_long_file_name_to_hash` is enabled. The value of this setting does not include the length of the file extension. So, it is recommended to set it below the maximum filename length (usually 255 bytes) with some gap to avoid filesystem errors. Default value: 127.
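A minimal sketch, assuming these two settings can be given per table like other MergeTree settings (the table and column names are hypothetical):

```sql
-- Hypothetical table: data files for the very long column name are stored under hashed file names.
CREATE TABLE t_long_column_names
(
    `a_column_with_an_extremely_long_and_descriptive_name` UInt64
)
ENGINE = MergeTree
ORDER BY tuple()
SETTINGS replace_long_file_name_to_hash = 1, max_file_name_length = 127;
```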
## clean_deleted_rows

Enable/disable automatic deletion of rows flagged as `is_deleted` when performing `OPTIMIZE ... FINAL` on a table using the ReplacingMergeTree engine. When disabled, the `CLEANUP` keyword has to be added to the `OPTIMIZE ... FINAL` to have the same behaviour.
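For illustration, a minimal sketch (the table name is hypothetical): with the setting disabled, rows flagged as deleted are purged only when `CLEANUP` is given explicitly:

```sql
-- Hypothetical ReplacingMergeTree table with an is_deleted column:
-- purge the flagged rows during the final merge.
OPTIMIZE TABLE my_replacing_table FINAL CLEANUP;
```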
@@ -381,6 +381,13 @@ Enabled by default.

Allow parsing numbers as strings in JSON input formats.

Enabled by default.
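A minimal sketch of the effect: with the setting enabled, a JSON number can be read into a `String` column (the expected output in the comment is an assumption):

```sql
SET input_format_json_read_numbers_as_strings = 1;
SELECT value, toTypeName(value) FROM format(JSONEachRow, 'value String', '{"value" : 26.8}');
-- expected: the number 26.8 is read as the string '26.8'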
## input_format_json_try_infer_numbers_from_strings {#input_format_json_try_infer_numbers_from_strings}

If enabled, during schema inference ClickHouse will try to infer numbers from string fields.
It can be useful if JSON data contains quoted UInt64 numbers.

Disabled by default.
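A minimal sketch of the effect during schema inference (the inferred type shown in the comment is an assumption):

```sql
SET input_format_json_try_infer_numbers_from_strings = 1;
DESC format(JSONEachRow, '{"value" : "123456789012345678"}');
-- expected: value is inferred as Nullable(Int64) instead of Nullable(String)
```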
## input_format_json_read_objects_as_strings {#input_format_json_read_objects_as_strings}
@@ -404,7 +411,76 @@ Result:
└────┴──────────────────────────┴────────────┘
```

Enabled by default.

## input_format_json_try_infer_named_tuples_from_objects {#input_format_json_try_infer_named_tuples_from_objects}

If enabled, during schema inference ClickHouse will try to infer named Tuple from JSON objects.
The resulting named Tuple will contain all elements from all corresponding JSON objects from sample data.

Example:

```sql
SET input_format_json_try_infer_named_tuples_from_objects = 1;
DESC format(JSONEachRow, '{"obj" : {"a" : 42, "b" : "Hello"}}, {"obj" : {"a" : 43, "c" : [1, 2, 3]}}, {"obj" : {"d" : {"e" : 42}}}')
```

Result:

```
┌─name─┬─type───────────────────────────────────────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
│ obj │ Tuple(a Nullable(Int64), b Nullable(String), c Array(Nullable(Int64)), d Tuple(e Nullable(Int64))) │ │ │ │ │ │
└──────┴────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
```

Enabled by default.

## input_format_json_read_arrays_as_strings {#input_format_json_read_arrays_as_strings}

Allow parsing JSON arrays as strings in JSON input formats.

Example:

```sql
SET input_format_json_read_arrays_as_strings = 1;
SELECT arr, toTypeName(arr), JSONExtractArrayRaw(arr)[3] from format(JSONEachRow, 'arr String', '{"arr" : [1, "Hello", [1,2,3]]}');
```

Result:
```
┌─arr───────────────────┬─toTypeName(arr)─┬─arrayElement(JSONExtractArrayRaw(arr), 3)─┐
│ [1, "Hello", [1,2,3]] │ String │ [1,2,3] │
└───────────────────────┴─────────────────┴───────────────────────────────────────────┘
```

Enabled by default.

## input_format_json_infer_incomplete_types_as_strings {#input_format_json_infer_incomplete_types_as_strings}

Allow using String type for JSON keys that contain only `Null`/`{}`/`[]` in the data sample during schema inference.
In JSON formats any value can be read as String, and we can avoid errors like `Cannot determine type for column 'column_name' by first 25000 rows of data, most likely this column contains only Nulls or empty Arrays/Maps` during schema inference by using String type for keys with unknown types.

Example:

```sql
SET input_format_json_infer_incomplete_types_as_strings = 1, input_format_json_try_infer_named_tuples_from_objects = 1;
DESCRIBE format(JSONEachRow, '{"obj" : {"a" : [1,2,3], "b" : "hello", "c" : null, "d" : {}, "e" : []}}');
SELECT * FROM format(JSONEachRow, '{"obj" : {"a" : [1,2,3], "b" : "hello", "c" : null, "d" : {}, "e" : []}}');
```

Result:
```
┌─name─┬─type───────────────────────────────────────────────────────────────────────────────────────────────────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
│ obj │ Tuple(a Array(Nullable(Int64)), b Nullable(String), c Nullable(String), d Nullable(String), e Array(Nullable(String))) │ │ │ │ │ │
└──────┴────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘

┌─obj────────────────────────────┐
│ ([1,2,3],'hello',NULL,'{}',[]) │
└────────────────────────────────┘
```

Enabled by default.

## input_format_json_validate_types_from_metadata {#input_format_json_validate_types_from_metadata}
@@ -3279,6 +3279,17 @@ Possible values:

Default value: `0`.

## use_mysql_types_in_show_columns {#use_mysql_types_in_show_columns}

Show the names of MySQL data types corresponding to ClickHouse data types in [SHOW COLUMNS](../../sql-reference/statements/show.md#show_columns) and SELECTs on [system.columns](../system-tables/columns.md).

Possible values:

- 0 - Show names of native ClickHouse data types.
- 1 - Show names of MySQL data types corresponding to ClickHouse data types.

Default value: `0`.
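For illustration, a minimal sketch with a hypothetical table (the type names in the comments are assumptions):

```sql
CREATE TABLE t (id UInt64, name String) ENGINE = MergeTree ORDER BY id;

SHOW COLUMNS FROM t;   -- types are shown as ClickHouse types, e.g. UInt64, String

SET use_mysql_types_in_show_columns = 1;
SHOW COLUMNS FROM t;   -- types are shown as the corresponding MySQL type names
```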
## execute_merges_on_single_replica_time_threshold {#execute-merges-on-single-replica-time-threshold}

Enables special logic to perform merges on replicas.
@@ -4067,6 +4078,17 @@ Result:
└─────┴─────┴───────┘
```

## splitby_max_substrings_includes_remaining_string {#splitby_max_substrings_includes_remaining_string}

Controls whether function [splitBy*()](../../sql-reference/functions/splitting-merging-functions.md) with argument `max_substrings` > 0 will include the remaining string in the last element of the result array.

Possible values:

- `0` - The remaining string will not be included in the last element of the result array.
- `1` - The remaining string will be included in the last element of the result array. This is the behavior of Spark's [`split()`](https://spark.apache.org/docs/3.1.2/api/python/reference/api/pyspark.sql.functions.split.html) function and Python's ['string.split()'](https://docs.python.org/3/library/stdtypes.html#str.split) method.

Default value: `0`
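A minimal sketch comparing both behaviours with `splitByChar()` (the expected results in the comments are assumptions):

```sql
SELECT splitByChar('_', 'a_b_c_d', 2)
SETTINGS splitby_max_substrings_includes_remaining_string = 0;
-- expected: ['a','b']

SELECT splitByChar('_', 'a_b_c_d', 2)
SETTINGS splitby_max_substrings_includes_remaining_string = 1;
-- expected: ['a','b_c_d']
```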
## enable_extended_results_for_datetime_functions {#enable-extended-results-for-datetime-functions}

Enables or disables returning results of type:
@@ -4648,6 +4670,10 @@ SELECT toFloat64('1.7091'), toFloat64('1.5008753E7') SETTINGS precise_float_pars

Interval (in milliseconds) for sending updates with partial data about the result table to the client (in interactive mode) during query execution. Setting to 0 disables partial results. Only supported for single-threaded GROUP BY without key, ORDER BY, LIMIT and OFFSET.

:::note
It's an experimental feature. Enable `allow_experimental_partial_result` setting first to use it.
:::

## max_rows_in_partial_result

Maximum rows to show in the partial result after every real-time update while the query runs (use partial result limit + OFFSET as a value in case of OFFSET in the query).
@@ -4668,43 +4694,35 @@ The default value is `false`.
<validate_tcp_client_information>true</validate_tcp_client_information>
```

## print_pretty_type_names {#print_pretty_type_names}

Allows printing deep-nested type names in a pretty way with indents in `DESCRIBE` query and in `toTypeName()` function.

Example:

```sql
CREATE TABLE test (a Tuple(b String, c Tuple(d Nullable(UInt64), e Array(UInt32), f Array(Tuple(g String, h Map(String, Array(Tuple(i String, j UInt64))))), k Date), l Nullable(String))) ENGINE=Memory;
DESCRIBE TABLE test FORMAT TSVRaw SETTINGS print_pretty_type_names=1;
```

```
a Tuple(
    b String,
    c Tuple(
        d Nullable(UInt64),
        e Array(UInt32),
        f Array(Tuple(
            g String,
            h Map(
                String,
                Array(Tuple(
                    i String,
                    j UInt64
                ))
            )
        )),
        k Date
    ),
    l Nullable(String)
)
```
@@ -14,7 +14,7 @@ The `system.columns` table contains the following columns (the column type is sh
- `database` ([String](../../sql-reference/data-types/string.md)) — Database name.
- `table` ([String](../../sql-reference/data-types/string.md)) — Table name.
- `name` ([String](../../sql-reference/data-types/string.md)) — Column name.
- `type` ([String](../../sql-reference/data-types/string.md)) — Column type. If setting [use_mysql_types_in_show_columns](../../operations/settings/settings.md#use_mysql_types_in_show_columns) = 1 (default: 0), then the equivalent type name in MySQL is shown.
- `position` ([UInt64](../../sql-reference/data-types/int-uint.md)) — Ordinal position of a column in a table starting with 1.
- `default_kind` ([String](../../sql-reference/data-types/string.md)) — Expression type (`DEFAULT`, `MATERIALIZED`, `ALIAS`) for the default value, or an empty string if it is not defined.
- `default_expression` ([String](../../sql-reference/data-types/string.md)) — Expression for the default value, or an empty string if it is not defined.
@@ -7,28 +7,34 @@ Contains information about normal and aggregate functions.

Columns:

- `name` ([String](../../sql-reference/data-types/string.md)) – The name of the function.
- `is_aggregate` ([UInt8](../../sql-reference/data-types/int-uint.md)) — Whether the function is an aggregate function.
- `is_deterministic` ([Nullable](../../sql-reference/data-types/nullable.md)([UInt8](../../sql-reference/data-types/int-uint.md))) - Whether the function is deterministic.
- `case_insensitive` ([UInt8](../../sql-reference/data-types/int-uint.md)) - Whether the function name can be used case-insensitively.
- `alias_to` ([String](../../sql-reference/data-types/string.md)) - The original function name, if the function name is an alias.
- `create_query` ([String](../../sql-reference/data-types/string.md)) - Unused.
- `origin` ([Enum8](../../sql-reference/data-types/enum.md)) - Unused.
- `description` ([String](../../sql-reference/data-types/string.md)) - A high-level description of what the function does.
- `syntax` ([String](../../sql-reference/data-types/string.md)) - Signature of the function.
- `arguments` ([String](../../sql-reference/data-types/string.md)) - What arguments the function takes.
- `returned_value` ([String](../../sql-reference/data-types/string.md)) - What the function returns.
- `examples` ([String](../../sql-reference/data-types/string.md)) - Example usage of the function.
- `categories` ([String](../../sql-reference/data-types/string.md)) - The category of the function.

**Example**

```sql
SELECT name, is_aggregate, is_deterministic, case_insensitive, alias_to FROM system.functions LIMIT 5;
```

```text
┌─name─────────────────────┬─is_aggregate─┬─is_deterministic─┬─case_insensitive─┬─alias_to─┐
│ BLAKE3 │ 0 │ 1 │ 0 │ │
│ sipHash128Reference │ 0 │ 1 │ 0 │ │
│ mapExtractKeyLike │ 0 │ 1 │ 0 │ │
│ sipHash128ReferenceKeyed │ 0 │ 1 │ 0 │ │
│ mapPartialSort │ 0 │ 1 │ 0 │ │
└──────────────────────────┴──────────────┴──────────────────┴──────────────────┴──────────┘

5 rows in set. Elapsed: 0.002 sec.
```
@@ -13,16 +13,20 @@ SHOW TABLES FROM information_schema;
```

``` text
┌─name────────────────────┐
│ COLUMNS │
│ KEY_COLUMN_USAGE │
│ REFERENTIAL_CONSTRAINTS │
│ SCHEMATA │
│ TABLES │
│ VIEWS │
│ columns │
│ key_column_usage │
│ referential_constraints │
│ schemata │
│ tables │
│ views │
└─────────────────────────┘
```

`INFORMATION_SCHEMA` contains the following views:
@@ -31,8 +35,10 @@ SHOW TABLES FROM information_schema;
- [SCHEMATA](#schemata)
- [TABLES](#tables)
- [VIEWS](#views)
- [KEY_COLUMN_USAGE](#key_column_usage)
- [REFERENTIAL_CONSTRAINTS](#referential_constraints)

Case-insensitive equivalent views, e.g. `INFORMATION_SCHEMA.columns`, are provided for reasons of compatibility with other databases. The same applies to all the columns in these views - both lowercase (for example, `table_name`) and uppercase (`TABLE_NAME`) variants are provided.

## COLUMNS {#columns}
@@ -63,13 +69,43 @@ Columns:
- `domain_catalog` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — `NULL`, not supported.
- `domain_schema` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — `NULL`, not supported.
- `domain_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — `NULL`, not supported.
- `extra` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — `STORED GENERATED` for `MATERIALIZED`-type columns, `VIRTUAL GENERATED` for `ALIAS`-type columns, `DEFAULT_GENERATED` for `DEFAULT`-type columns, or `NULL`.

**Example**

Query:

``` sql
SELECT table_catalog,
       table_schema,
       table_name,
       column_name,
       ordinal_position,
       column_default,
       is_nullable,
       data_type,
       character_maximum_length,
       character_octet_length,
       numeric_precision,
       numeric_precision_radix,
       numeric_scale,
       datetime_precision,
       character_set_catalog,
       character_set_schema,
       character_set_name,
       collation_catalog,
       collation_schema,
       collation_name,
       domain_catalog,
       domain_schema,
       domain_name,
       column_comment,
       column_type
FROM INFORMATION_SCHEMA.COLUMNS
WHERE (table_schema = currentDatabase() OR table_schema = '')
  AND table_name NOT LIKE '%inner%'
LIMIT 1
FORMAT Vertical;
```

Result:
@@ -121,7 +157,17 @@ Columns:
Query:

``` sql
SELECT catalog_name,
       schema_name,
       schema_owner,
       default_character_set_catalog,
       default_character_set_schema,
       default_character_set_name,
       sql_path
FROM information_schema.schemata
WHERE schema_name ilike 'information_schema'
LIMIT 1
FORMAT Vertical;
```

Result:
@@ -147,19 +193,35 @@ Columns:
- `table_catalog` ([String](../../sql-reference/data-types/string.md)) — The name of the database in which the table is located.
- `table_schema` ([String](../../sql-reference/data-types/string.md)) — The name of the database in which the table is located.
- `table_name` ([String](../../sql-reference/data-types/string.md)) — Table name.
- `table_type` ([String](../../sql-reference/data-types/string.md)) — Table type. Possible values:
  - `BASE TABLE`
  - `VIEW`
  - `FOREIGN TABLE`
  - `LOCAL TEMPORARY`
  - `SYSTEM VIEW`
- `table_rows` ([Nullable](../../sql-reference/data-types/nullable.md)([UInt64](../../sql-reference/data-types/int-uint.md))) — The total number of rows. NULL if it could not be determined.
- `data_length` ([Nullable](../../sql-reference/data-types/nullable.md)([UInt64](../../sql-reference/data-types/int-uint.md))) — The size of the data on-disk. NULL if it could not be determined.
- `table_collation` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — The table default collation. Always `utf8mb4_0900_ai_ci`.
- `table_comment` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — The comment used when creating the table.

**Example**

Query:

``` sql
SELECT table_catalog,
       table_schema,
       table_name,
       table_type,
       table_collation,
       table_comment
FROM INFORMATION_SCHEMA.TABLES
WHERE (table_schema = currentDatabase() OR table_schema = '')
  AND table_name NOT LIKE '%inner%'
LIMIT 1
FORMAT Vertical;
```

Result:
@@ -167,10 +229,12 @@ Result:
``` text
Row 1:
──────
table_catalog: default
table_schema: default
table_name: describe_example
table_type: BASE TABLE
table_collation: utf8mb4_0900_ai_ci
table_comment:
```

## VIEWS {#views}
@@ -199,7 +263,20 @@ Query:
``` sql
CREATE VIEW v (n Nullable(Int32), f Float64) AS SELECT n, f FROM t;
CREATE MATERIALIZED VIEW mv ENGINE = Null AS SELECT * FROM system.one;
SELECT table_catalog,
       table_schema,
       table_name,
       view_definition,
       check_option,
       is_updatable,
       is_insertable_into,
       is_trigger_updatable,
       is_trigger_deletable,
       is_trigger_insertable_into
FROM information_schema.views
WHERE table_schema = currentDatabase()
LIMIT 1
FORMAT Vertical;
```

Result:
@@ -218,3 +295,80 @@ is_trigger_updatable: NO
is_trigger_deletable: NO
is_trigger_insertable_into: NO
```

## KEY_COLUMN_USAGE {#key_column_usage}

Contains columns from the [system.tables](../../operations/system-tables/tables.md) system table which are restricted by constraints.

Columns:

- `constraint_catalog` ([String](../../sql-reference/data-types/string.md)) — Currently unused. Always `def`.
- `constraint_schema` ([String](../../sql-reference/data-types/string.md)) — The name of the schema (database) to which the constraint belongs.
- `constraint_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — The name of the constraint.
- `table_catalog` ([String](../../sql-reference/data-types/string.md)) — Currently unused. Always `def`.
- `table_schema` ([String](../../sql-reference/data-types/string.md)) — The name of the schema (database) to which the table belongs.
- `table_name` ([String](../../sql-reference/data-types/string.md)) — The name of the table that has the constraint.
- `column_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — The name of the column that has the constraint.
- `ordinal_position` ([UInt32](../../sql-reference/data-types/int-uint.md)) — Currently unused. Always `1`.
- `position_in_unique_constraint` ([Nullable](../../sql-reference/data-types/nullable.md)([UInt32](../../sql-reference/data-types/int-uint.md))) — Currently unused. Always `NULL`.
- `referenced_table_schema` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. Always `NULL`.
- `referenced_table_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. Always `NULL`.
- `referenced_column_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. Always `NULL`.

**Example**

```sql
CREATE TABLE test (i UInt32, s String) ENGINE MergeTree ORDER BY i;
SELECT constraint_catalog,
       constraint_schema,
       constraint_name,
       table_catalog,
       table_schema,
       table_name,
       column_name,
       ordinal_position,
       position_in_unique_constraint,
       referenced_table_schema,
       referenced_table_name,
       referenced_column_name
FROM information_schema.key_column_usage
WHERE table_name = 'test'
FORMAT Vertical;
```

Result:

```
Row 1:
──────
constraint_catalog: def
constraint_schema: default
constraint_name: PRIMARY
table_catalog: def
table_schema: default
table_name: test
column_name: i
ordinal_position: 1
position_in_unique_constraint: ᴺᵁᴸᴸ
referenced_table_schema: ᴺᵁᴸᴸ
referenced_table_name: ᴺᵁᴸᴸ
referenced_column_name: ᴺᵁᴸᴸ
```

## REFERENTIAL_CONSTRAINTS {#referential_constraints}

Contains information about foreign keys. Currently returns an empty result (no rows) which is just enough to provide compatibility with 3rd party tools like Tableau Online.

Columns:

- `constraint_catalog` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `constraint_schema` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `constraint_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused.
- `unique_constraint_catalog` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `unique_constraint_schema` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `unique_constraint_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused.
- `match_option` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `update_rule` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `delete_rule` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `table_name` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
- `referenced_table_name` ([String](../../sql-reference/data-types/string.md)) — Currently unused.
@@ -17,7 +17,7 @@ Columns:
- `0` — Current user can change the setting.
- `1` — Current user can’t change the setting.
- `type` ([String](../../sql-reference/data-types/string.md)) — Setting type (implementation specific string value).
- `is_obsolete` ([UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges)) - Shows whether a setting is obsolete.

**Example**
```sql
@ -38,6 +38,10 @@ inflight_requests: ᴺᵁᴸᴸ

inflight_cost: ᴺᵁᴸᴸ
max_requests: ᴺᵁᴸᴸ
max_cost: ᴺᵁᴸᴸ
max_speed: ᴺᵁᴸᴸ
max_burst: ᴺᵁᴸᴸ
throttling_us: ᴺᵁᴸᴸ
tokens: ᴺᵁᴸᴸ
```

Columns:
@ -62,3 +66,7 @@ Columns:

- `inflight_cost` (`Nullable(Int64)`) - For `inflight_limit` nodes only. The sum of costs (e.g. bytes) of all resource requests dequeued from this node that are currently in the consumption state.
- `max_requests` (`Nullable(Int64)`) - For `inflight_limit` nodes only. Upper limit for `inflight_requests` leading to constraint violation.
- `max_cost` (`Nullable(Int64)`) - For `inflight_limit` nodes only. Upper limit for `inflight_cost` leading to constraint violation.
- `max_speed` (`Nullable(Float64)`) - For `bandwidth_limit` nodes only. Upper limit for bandwidth in tokens per second.
- `max_burst` (`Nullable(Float64)`) - For `bandwidth_limit` nodes only. Upper limit for `tokens` available in the token-bucket throttler.
- `throttling_us` (`Nullable(Int64)`) - For `bandwidth_limit` nodes only. Total number of microseconds this node was in the throttling state.
- `tokens` (`Nullable(Float64)`) - For `bandwidth_limit` nodes only. Number of tokens currently available in the token-bucket throttler.
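
As an illustrative sketch (the `path` and `type` columns are assumed from the rest of the `system.scheduler` page, not from the columns listed above), the throttler state of bandwidth-limiting nodes could be inspected with:

```sql
SELECT path, max_speed, max_burst, throttling_us, tokens
FROM system.scheduler
WHERE type = 'bandwidth_limit'
FORMAT Vertical;
```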
@ -14,7 +14,7 @@ Columns:

- `changed` ([UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges)) — Shows whether a setting was specified in `config.xml`
- `description` ([String](../../sql-reference/data-types/string.md)) — Short server setting description.
- `type` ([String](../../sql-reference/data-types/string.md)) — Server setting value type.
- `is_obsolete` ([UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges)) - Shows whether a setting is obsolete.

**Example**
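
For illustration, a query of this shape (the `system.server_settings` table name is assumed from the columns described above, not stated in this excerpt):

```sql
SELECT name, type, changed, is_obsolete
FROM system.server_settings
WHERE changed
LIMIT 5
FORMAT Vertical;
```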
@ -17,7 +17,7 @@ Columns:

- `0` — Current user can change the setting.
- `1` — Current user can’t change the setting.
- `default` ([String](../../sql-reference/data-types/string.md)) — Setting default value.
- `is_obsolete` ([UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges)) - Shows whether a setting is obsolete.

**Example**

@ -33,7 +33,7 @@ Columns:

- `MemoryPeak` represents collecting updates of peak memory usage.
- `ProfileEvent` represents collecting increments of profile events.

- `thread_id` ([UInt64](../../sql-reference/data-types/int-uint.md)) — Thread identifier.

- `query_id` ([String](../../sql-reference/data-types/string.md)) — Query identifier that can be used to get details about a query that was running from the [query_log](#system_tables-query_log) system table.
@ -79,10 +79,13 @@ graph TD

**Possible node types:**
* `inflight_limit` (constraint) - blocks if either the number of concurrent in-flight requests exceeds `max_requests` or their total cost exceeds `max_cost`; must have a single child.
* `bandwidth_limit` (constraint) - blocks if the current bandwidth exceeds `max_speed` (0 means unlimited) or the burst exceeds `max_burst` (by default equals `max_speed`); must have a single child.
* `fair` (policy) - selects the next request to serve from one of its children nodes according to max-min fairness; children nodes can specify `weight` (default is 1).
* `priority` (policy) - selects the next request to serve from one of its children nodes according to static priorities (lower value means higher priority); children nodes can specify `priority` (default is 0).
* `fifo` (queue) - leaf of the hierarchy capable of holding requests that exceed resource capacity.

To use the full capacity of the underlying resource, you should use `inflight_limit`. Note that a low value of `max_requests` or `max_cost` can lead to incomplete resource utilization, while too high values can lead to empty queues inside the scheduler, which in turn causes policies to be ignored (unfairness or ignoring of priorities) in the subtree. On the other hand, if you want to protect a resource from too high utilization, you should use `bandwidth_limit`. It throttles when the amount of resource consumed in `duration` seconds exceeds `max_burst + max_speed * duration` bytes; a worked example follows below. Two `bandwidth_limit` nodes on the same resource can be used to limit peak bandwidth during short intervals and average bandwidth over longer ones.
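
For instance, with hypothetical settings `max_speed = 100000000` (100 MB/s) and `max_burst = 200000000` (200 MB), at most `200000000 + 100000000 * 10 = 1200000000` bytes (1.2 GB) may be consumed within any 10-second window before the node starts throttling.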

The following example shows how to define the IO scheduling hierarchies shown in the picture:

```xml
@ -33,8 +33,8 @@ The null hypothesis is that means of populations are equal. Normal distribution

- calculated t-statistic. [Float64](../../../sql-reference/data-types/float.md).
- calculated p-value. [Float64](../../../sql-reference/data-types/float.md).
- [calculated confidence-interval-low. [Float64](../../../sql-reference/data-types/float.md).]
- [calculated confidence-interval-high. [Float64](../../../sql-reference/data-types/float.md).]
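
As an illustrative sketch only (this excerpt does not name the function; `studentTTest` is assumed here, and `welchTTest` returns a tuple of the same shape when a confidence level is passed):

```sql
CREATE TABLE ttest_sample (value Float64, sample_index UInt8) ENGINE = Memory;

INSERT INTO ttest_sample VALUES
    (20.3, 0), (21.1, 0), (21.9, 0),
    (21.7, 1), (19.9, 1), (21.8, 1);

-- Returns (t-statistic, p-value, confidence-interval-low, confidence-interval-high)
SELECT studentTTest(0.95)(value, sample_index) FROM ttest_sample;
```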

**Example**
@ -4,7 +4,7 @@ sidebar_position: 52

sidebar_label: Array(T)
---

# Array(T)

An array of `T`-type items, with array indexes starting from 1. `T` can be any data type, including an array.
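
For a quick, self-contained illustration (not taken from the page itself), arrays can be created with bracket literals or the `array` function, and indexed starting from 1:

```sql
SELECT
    [1, 2, 3] AS arr,                -- array literal
    array('a', 'b', 'c') AS letters, -- array() function
    arr[1] AS first_element,         -- indexing starts at 1
    toTypeName(arr) AS arr_type;     -- Array(UInt8)
```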
@ -26,7 +26,12 @@ ENGINE = TinyLog;

```

``` sql
-- Parse Date
-- - from string,
-- - from 'small' integer interpreted as number of days since 1970-01-01, and
-- - from 'big' integer interpreted as number of seconds since 1970-01-01.
INSERT INTO dt VALUES ('2019-01-01', 1), (17897, 2), (1546300800, 3);

SELECT * FROM dt;
```
@ -34,6 +39,7 @@ SELECT * FROM dt;

┌──timestamp─┬─event_id─┐
│ 2019-01-01 │        1 │
│ 2019-01-01 │        2 │
│ 2019-01-01 │        3 │
└────────────┴──────────┘
```
Some files were not shown because too many files have changed in this diff.