Merge branch 'master' into refactor-join-step

This commit is contained in:
Nikolai Kochetov 2021-04-29 12:23:36 +03:00
commit 1f5a0f8842
308 changed files with 3048 additions and 3544 deletions

View File

@ -7,7 +7,7 @@ assignees: ''
---
(you don't have to strictly follow this form)
You have to provide the following information whenever possible.
**Describe the bug**
A clear and concise description of what is not working as it is supposed to.

1
.gitignore vendored
View File

@ -27,6 +27,7 @@
/docs/zh/single.md
/docs/ja/single.md
/docs/fa/single.md
/docs/en/development/cmake-in-clickhouse.md
# callgrind files
callgrind.out.*

View File

@ -167,6 +167,7 @@ endif ()
# If turned `ON`, assumes the user has either the system GTest library or the bundled one.
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
if (OS_LINUX AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND CMAKE_VERSION VERSION_GREATER "3.9.0")
# Only for Linux, x86_64.

2
contrib/boost vendored

@ -1 +1 @@
Subproject commit a8d43d3142cc6b26fc55bec33f7f6edb1156ab7a
Subproject commit 9f0ff347e50429686604002d8ad1fd07515c4f31

2
contrib/cyrus-sasl vendored

@ -1 +1 @@
Subproject commit 9995bf9d8e14f58934d9313ac64f13780d6dd3c9
Subproject commit e6466edfd638cc5073debe941c53345b18a09512

View File

@ -3,10 +3,10 @@ compilers and build settings. Correctly configured Docker daemon is single depen
Usage:
Build deb package with `gcc-9` in `debug` mode:
Build deb package with `clang-11` in `debug` mode:
```
$ mkdir deb/test_output
$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=gcc-9 --build-type=debug
$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=clang-11 --build-type=debug
$ ls -l deb/test_output
-rw-r--r-- 1 root root 3730 clickhouse-client_18.14.2+debug_all.deb
-rw-r--r-- 1 root root 84221888 clickhouse-common-static_18.14.2+debug_amd64.deb
@ -18,11 +18,11 @@ $ ls -l deb/test_output
```
Build ClickHouse binary with `clang-10` and `address` sanitizer in `relwithdebuginfo`
Build ClickHouse binary with `clang-11` and `address` sanitizer in `relwithdebuginfo`
mode:
```
$ mkdir $HOME/some_clickhouse
$ ./packager --output-dir=$HOME/some_clickhouse --package-type binary --compiler=clang-10 --sanitizer=address
$ ./packager --output-dir=$HOME/some_clickhouse --package-type binary --compiler=clang-11 --sanitizer=address
$ ls -l $HOME/some_clickhouse
-rwxr-xr-x 1 root root 787061952 clickhouse
lrwxrwxrwx 1 root root 10 clickhouse-benchmark -> clickhouse

View File

@ -37,23 +37,18 @@ RUN apt-get update \
bash \
build-essential \
ccache \
clang-10 \
clang-11 \
clang-tidy-10 \
clang-tidy-11 \
cmake \
curl \
g++-9 \
gcc-9 \
g++-10 \
gcc-10 \
gdb \
git \
gperf \
libicu-dev \
libreadline-dev \
lld-10 \
lld-11 \
llvm-10 \
llvm-10-dev \
llvm-11 \
llvm-11-dev \
moreutils \

View File

@ -35,22 +35,15 @@ RUN curl -O https://clickhouse-builds.s3.yandex.net/utils/1/dpkg-deb \
RUN apt-get update \
&& apt-get install \
alien \
clang-10 \
clang-11 \
clang-tidy-10 \
clang-tidy-11 \
cmake \
debhelper \
devscripts \
g++-9 \
gcc-9 \
gdb \
git \
gperf \
lld-10 \
lld-11 \
llvm-10 \
llvm-10-dev \
llvm-11 \
llvm-11-dev \
moreutils \

View File

@ -181,9 +181,8 @@ if __name__ == "__main__":
parser.add_argument("--clickhouse-repo-path", default=os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, os.pardir))
parser.add_argument("--output-dir", required=True)
parser.add_argument("--build-type", choices=("debug", ""), default="")
parser.add_argument("--compiler", choices=("clang-10", "clang-10-darwin", "clang-10-aarch64", "clang-10-freebsd",
"clang-11", "clang-11-darwin", "clang-11-aarch64", "clang-11-freebsd",
"gcc-9", "gcc-10"), default="gcc-9")
parser.add_argument("--compiler", choices=("clang-11", "clang-11-darwin", "clang-11-aarch64", "clang-11-freebsd",
"gcc-10"), default="clang-11")
parser.add_argument("--sanitizer", choices=("address", "thread", "memory", "undefined", ""), default="")
parser.add_argument("--unbundled", action="store_true")
parser.add_argument("--split-binary", action="store_true")

View File

@ -1,6 +1,17 @@
#!/bin/bash
set -e
mkdir -p /etc/docker/
cat > /etc/docker/daemon.json << EOF
{
"ipv6": true,
"fixed-cidr-v6": "fd00::/8",
"ip-forward": true,
"insecure-registries" : ["dockerhub-proxy.sas.yp-c.yandex.net:5000"],
"registry-mirrors" : ["http://dockerhub-proxy.sas.yp-c.yandex.net:5000"]
}
EOF
dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 &>/var/log/somefile &
set +e

View File

@ -1019,6 +1019,7 @@ done
wait
# Create per-query flamegraphs
touch report/query-files.txt
IFS=$'\n'
for version in {right,left}
do
@ -1209,6 +1210,12 @@ function upload_results
/^metric/ { print old_sha, new_sha, $2, $3 }' \
| "${client[@]}" --query "INSERT INTO run_attributes_v1 FORMAT TSV"
# Grepping numactl results from log is too crazy, I'll just call it again.
"${client[@]}" --query "INSERT INTO run_attributes_v1 FORMAT TSV" <<EOF
$REF_SHA $SHA_TO_TEST $(numactl --show | sed -n 's/^cpubind:[[:space:]]\+/numactl-cpubind /p')
$REF_SHA $SHA_TO_TEST $(numactl --hardware | sed -n 's/^available:[[:space:]]\+/numactl-available /p')
EOF
set -x
}

View File

@ -81,7 +81,10 @@ def substitute_parameters(query_templates, other_templates = []):
query_results = []
other_results = [[]] * (len(other_templates))
for i, q in enumerate(query_templates):
keys = set(n for _, n, _, _ in string.Formatter().parse(q) if n)
# We need stable order of keys here, so that the order of substitutions
# is always the same, and the query indexes are consistent across test
# runs.
keys = sorted(set(n for _, n, _, _ in string.Formatter().parse(q) if n))
values = [available_parameters[k] for k in keys]
combos = itertools.product(*values)
for c in combos:

View File

@ -2,6 +2,7 @@
FROM ubuntu:20.04
RUN apt-get update --yes && env DEBIAN_FRONTEND=noninteractive apt-get install wget unzip git openjdk-14-jdk maven python3 --yes --no-install-recommends
RUN wget https://github.com/sqlancer/sqlancer/archive/master.zip -O /sqlancer.zip
RUN mkdir /sqlancer && \
cd /sqlancer && \

View File

@ -73,9 +73,9 @@ The build requires the following components:
- Git (is used only to checkout the sources, it's not needed for the build)
- CMake 3.10 or newer
- Ninja (recommended) or Make
- C++ compiler: gcc 10 or clang 8 or newer
- Linker: lld or gold (the classic GNU ld won't work)
- Ninja
- C++ compiler: clang-11 or newer
- Linker: lld
- Python (is only used inside LLVM build and it is optional)
If all the components are installed, you may build in the same way as the steps above.
@ -83,7 +83,7 @@ If all the components are installed, you may build in the same way as the steps
Example for Ubuntu Eoan:
``` bash
sudo apt update
sudo apt install git cmake ninja-build g++ python
sudo apt install git cmake ninja-build clang++ python
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
mkdir build && cd build
cmake ../ClickHouse
@ -92,7 +92,7 @@ ninja
Example for OpenSUSE Tumbleweed:
``` bash
sudo zypper install git cmake ninja gcc-c++ python lld
sudo zypper install git cmake ninja clang-c++ python lld
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
mkdir build && cd build
cmake ../ClickHouse
@ -102,7 +102,7 @@ ninja
Example for Fedora Rawhide:
``` bash
sudo yum update
yum --nogpg install git cmake make gcc-c++ python3
yum --nogpg install git cmake make clang-c++ python3
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
mkdir build && cd build
cmake ../ClickHouse

View File

@ -1,288 +0,0 @@
# CMake in ClickHouse
## TL; DR How to make ClickHouse compile and link faster?
Developer only! This command will likely fulfill most of your needs. Run before calling `ninja`.
```cmake
cmake .. \
-DCMAKE_C_COMPILER=/bin/clang-10 \
-DCMAKE_CXX_COMPILER=/bin/clang++-10 \
-DCMAKE_BUILD_TYPE=Debug \
-DENABLE_CLICKHOUSE_ALL=OFF \
-DENABLE_CLICKHOUSE_SERVER=ON \
-DENABLE_CLICKHOUSE_CLIENT=ON \
-DUSE_STATIC_LIBRARIES=OFF \
-DSPLIT_SHARED_LIBRARIES=ON \
-DENABLE_LIBRARIES=OFF \
-DUSE_UNWIND=ON \
-DENABLE_UTILS=OFF \
-DENABLE_TESTS=OFF
```
## CMake files types
1. ClickHouse's source CMake files (located in the root directory and in `/src`).
2. Arch-dependent CMake files (located in `/cmake/*os_name*`).
3. Libraries finders (search for contrib libraries, located in `/cmake/find`).
3. Contrib build CMake files (used instead of libraries' own CMake files, located in `/cmake/modules`)
## List of CMake flags
* This list is auto-generated by [this Python script](https://github.com/clickhouse/clickhouse/blob/master/docs/tools/cmake_in_clickhouse_generator.py).
* The flag name is a link to its position in the code.
* If an option's default value is itself an option, it's also a link to its position in this list.
### ClickHouse modes
| Name | Default value | Description | Comment |
|------|---------------|-------------|---------|
| <a name="enable-clickhouse-all"></a>[`ENABLE_CLICKHOUSE_ALL`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L8) | `ON` | Enable all ClickHouse modes by default | The `clickhouse` binary is a multi purpose tool that contains multiple execution modes (client, server, etc.), each of them may be built and linked as a separate library. If you do not know what modes you need, turn this option OFF and enable SERVER and CLIENT only. |
| <a name="enable-clickhouse-benchmark"></a>[`ENABLE_CLICKHOUSE_BENCHMARK`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L18) | `ENABLE_CLICKHOUSE_ALL` | Queries benchmarking mode | https://clickhouse.tech/docs/en/operations/utilities/clickhouse-benchmark/ |
| <a name="enable-clickhouse-client"></a>[`ENABLE_CLICKHOUSE_CLIENT`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L11) | `ENABLE_CLICKHOUSE_ALL` | Client mode (interactive tui/shell that connects to the server) | |
| <a name="enable-clickhouse-compressor"></a>[`ENABLE_CLICKHOUSE_COMPRESSOR`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L23) | `ENABLE_CLICKHOUSE_ALL` | Data compressor and decompressor | https://clickhouse.tech/docs/en/operations/utilities/clickhouse-compressor/ |
| <a name="enable-clickhouse-copier"></a>[`ENABLE_CLICKHOUSE_COPIER`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L26) | `ENABLE_CLICKHOUSE_ALL` | Inter-cluster data copying mode | https://clickhouse.tech/docs/en/operations/utilities/clickhouse-copier/ |
| <a name="enable-clickhouse-extract-from-config"></a>[`ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L20) | `ENABLE_CLICKHOUSE_ALL` | Configs processor (extract values etc.) | |
| <a name="enable-clickhouse-format"></a>[`ENABLE_CLICKHOUSE_FORMAT`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L28) | `ENABLE_CLICKHOUSE_ALL` | Queries pretty-printer and formatter with syntax highlighting | |
| <a name="enable-clickhouse-git-import"></a>[`ENABLE_CLICKHOUSE_GIT_IMPORT`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L47) | `ENABLE_CLICKHOUSE_ALL` | A tool to analyze Git repositories | https://presentations.clickhouse.tech/matemarketing_2020/ |
| <a name="enable-clickhouse-install"></a>[`ENABLE_CLICKHOUSE_INSTALL`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L51) | `OFF` | Install ClickHouse without .deb/.rpm/.tgz packages (having the binary only) | |
| <a name="enable-clickhouse-library-bridge"></a>[`ENABLE_CLICKHOUSE_LIBRARY_BRIDGE`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L43) | `ENABLE_CLICKHOUSE_ALL` | HTTP-server working like a proxy to Library dictionary source | |
| <a name="enable-clickhouse-local"></a>[`ENABLE_CLICKHOUSE_LOCAL`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L15) | `ENABLE_CLICKHOUSE_ALL` | Local files fast processing mode | https://clickhouse.tech/docs/en/operations/utilities/clickhouse-local/ |
| <a name="enable-clickhouse-obfuscator"></a>[`ENABLE_CLICKHOUSE_OBFUSCATOR`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L32) | `ENABLE_CLICKHOUSE_ALL` | Table data obfuscator (convert real data to benchmark-ready one) | https://clickhouse.tech/docs/en/operations/utilities/clickhouse-obfuscator/ |
| <a name="enable-clickhouse-odbc-bridge"></a>[`ENABLE_CLICKHOUSE_ODBC_BRIDGE`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L37) | `ENABLE_CLICKHOUSE_ALL` | HTTP-server working like a proxy to ODBC driver | |
| <a name="enable-clickhouse-server"></a>[`ENABLE_CLICKHOUSE_SERVER`](https://github.com/clickhouse/clickhouse/blob/master/programs/CMakeLists.txt#L10) | `ENABLE_CLICKHOUSE_ALL` | Server mode (main mode) | |
### External libraries
Note that ClickHouse uses forks of these libraries, see https://github.com/ClickHouse-Extras.
| Name | Default value | Description | Comment |
|------|---------------|-------------|---------|
| <a name="enable-amqpcpp"></a>[`ENABLE_AMQPCPP`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/amqpcpp.cmake#L6) | `ENABLE_LIBRARIES` | Enable AMQP-CPP | |
| <a name="enable-avro"></a>[`ENABLE_AVRO`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/avro.cmake#L2) | `ENABLE_LIBRARIES` | Enable Avro | Needed when using Apache Avro serialization format |
| <a name="enable-base"></a>[`ENABLE_BASE64`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/base64.cmake#L2) | `ENABLE_LIBRARIES` | Enable base64 | |
| <a name="enable-brotli"></a>[`ENABLE_BROTLI`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/brotli.cmake#L1) | `ENABLE_LIBRARIES` | Enable brotli | |
| <a name="enable-capnp"></a>[`ENABLE_CAPNP`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/capnp.cmake#L1) | `ENABLE_LIBRARIES` | Enable Cap'n Proto | |
| <a name="enable-cassandra"></a>[`ENABLE_CASSANDRA`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/cassandra.cmake#L6) | `ENABLE_LIBRARIES` | Enable Cassandra | |
| <a name="enable-ccache"></a>[`ENABLE_CCACHE`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/ccache.cmake#L22) | `ENABLE_CCACHE_BY_DEFAULT` | Speedup re-compilations using ccache (external tool) | https://ccache.dev/ |
| <a name="enable-clang-tidy"></a>[`ENABLE_CLANG_TIDY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/analysis.cmake#L2) | `OFF` | Use clang-tidy static analyzer | https://clang.llvm.org/extra/clang-tidy/ |
| <a name="enable-curl"></a>[`ENABLE_CURL`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/curl.cmake#L1) | `ENABLE_LIBRARIES` | Enable curl | |
| <a name="enable-datasketches"></a>[`ENABLE_DATASKETCHES`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/datasketches.cmake#L1) | `ENABLE_LIBRARIES` | Enable DataSketches | |
| <a name="enable-embedded-compiler"></a>[`ENABLE_EMBEDDED_COMPILER`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/llvm.cmake#L5) | `ENABLE_LIBRARIES` | Set to TRUE to enable support for 'compile_expressions' option for query execution | |
| <a name="enable-fastops"></a>[`ENABLE_FASTOPS`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/fastops.cmake#L2) | `ENABLE_LIBRARIES` | Enable fast vectorized mathematical functions library by Mikhail Parakhin | |
| <a name="enable-gperf"></a>[`ENABLE_GPERF`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/gperf.cmake#L5) | `ENABLE_LIBRARIES` | Use gperf function hash generator tool | |
| <a name="enable-grpc"></a>[`ENABLE_GRPC`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/grpc.cmake#L8) | `ENABLE_GRPC_DEFAULT` | Use gRPC | |
| <a name="enable-gsasl-library"></a>[`ENABLE_GSASL_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/libgsasl.cmake#L1) | `ENABLE_LIBRARIES` | Enable gsasl library | |
| <a name="enable-h"></a>[`ENABLE_H3`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/h3.cmake#L1) | `ENABLE_LIBRARIES` | Enable H3 | |
| <a name="enable-hdfs"></a>[`ENABLE_HDFS`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/hdfs3.cmake#L2) | `ENABLE_LIBRARIES` | Enable HDFS | |
| <a name="enable-icu"></a>[`ENABLE_ICU`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/icu.cmake#L2) | `ENABLE_LIBRARIES` | Enable ICU | |
| <a name="enable-ldap"></a>[`ENABLE_LDAP`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/ldap.cmake#L5) | `ENABLE_LIBRARIES` | Enable LDAP | |
| <a name="enable-libpqxx"></a>[`ENABLE_LIBPQXX`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/libpqxx.cmake#L1) | `ENABLE_LIBRARIES` | Enable libpqxx | |
| <a name="enable-msgpack"></a>[`ENABLE_MSGPACK`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/msgpack.cmake#L1) | `ENABLE_LIBRARIES` | Enable msgpack library | |
| <a name="enable-mysql"></a>[`ENABLE_MYSQL`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/mysqlclient.cmake#L2) | `ENABLE_LIBRARIES` | Enable MySQL | |
| <a name="enable-nuraft"></a>[`ENABLE_NURAFT`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/nuraft.cmake#L1) | `ENABLE_LIBRARIES` | Enable NuRaft | |
| <a name="enable-odbc"></a>[`ENABLE_ODBC`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/odbc.cmake#L1) | `ENABLE_LIBRARIES` | Enable ODBC library | |
| <a name="enable-orc"></a>[`ENABLE_ORC`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/orc.cmake#L1) | `ENABLE_LIBRARIES` | Enable ORC | |
| <a name="enable-parquet"></a>[`ENABLE_PARQUET`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/parquet.cmake#L2) | `ENABLE_LIBRARIES` | Enable parquet | |
| <a name="enable-protobuf"></a>[`ENABLE_PROTOBUF`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/protobuf.cmake#L1) | `ENABLE_LIBRARIES` | Enable protobuf | |
| <a name="enable-rapidjson"></a>[`ENABLE_RAPIDJSON`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/rapidjson.cmake#L1) | `ENABLE_LIBRARIES` | Use rapidjson | |
| <a name="enable-rdkafka"></a>[`ENABLE_RDKAFKA`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/rdkafka.cmake#L1) | `ENABLE_LIBRARIES` | Enable kafka | |
| <a name="enable-rocksdb"></a>[`ENABLE_ROCKSDB`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/rocksdb.cmake#L5) | `ENABLE_LIBRARIES` | Enable ROCKSDB | |
| <a name="enable-s"></a>[`ENABLE_S3`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/s3.cmake#L2) | `ENABLE_LIBRARIES` | Enable S3 | |
| <a name="enable-ssl"></a>[`ENABLE_SSL`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/ssl.cmake#L3) | `ENABLE_LIBRARIES` | Enable ssl | Needed when securely connecting to an external server, e.g. clickhouse-client --host ... --secure |
| <a name="enable-stats"></a>[`ENABLE_STATS`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/stats.cmake#L1) | `ENABLE_LIBRARIES` | Enable StatsLib library | |
### External libraries system/bundled mode
| Name | Default value | Description | Comment |
|------|---------------|-------------|---------|
| <a name="use-internal-avro-library"></a>[`USE_INTERNAL_AVRO_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/avro.cmake#L11) | `ON` | Set to FALSE to use system avro library instead of bundled | |
| <a name="use-internal-aws-s-library"></a>[`USE_INTERNAL_AWS_S3_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/s3.cmake#L14) | `ON` | Set to FALSE to use system S3 instead of bundled (experimental set to OFF on your own risk) | |
| <a name="use-internal-brotli-library"></a>[`USE_INTERNAL_BROTLI_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/brotli.cmake#L12) | `USE_STATIC_LIBRARIES` | Set to FALSE to use system libbrotli library instead of bundled | Many systems ship only dynamic brotli libraries, so we fall back to the bundled one by default |
| <a name="use-internal-capnp-library"></a>[`USE_INTERNAL_CAPNP_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/capnp.cmake#L10) | `NOT_UNBUNDLED` | Set to FALSE to use system capnproto library instead of bundled | |
| <a name="use-internal-curl"></a>[`USE_INTERNAL_CURL`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/curl.cmake#L10) | `NOT_UNBUNDLED` | Use internal curl library | |
| <a name="use-internal-datasketches-library"></a>[`USE_INTERNAL_DATASKETCHES_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/datasketches.cmake#L5) | `NOT_UNBUNDLED` | Set to FALSE to use system DataSketches library instead of bundled | |
| <a name="use-internal-grpc-library"></a>[`USE_INTERNAL_GRPC_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/grpc.cmake#L25) | `NOT_UNBUNDLED` | Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk) | Normally we use the internal gRPC framework. You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case. The external gRPC framework can be installed in the system by running sudo apt-get install libgrpc++-dev protobuf-compiler-grpc |
| <a name="use-internal-gtest-library"></a>[`USE_INTERNAL_GTEST_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/gtest.cmake#L3) | `NOT_UNBUNDLED` | Set to FALSE to use system Google Test instead of bundled | |
| <a name="use-internal-h-library"></a>[`USE_INTERNAL_H3_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/h3.cmake#L9) | `ON` | Set to FALSE to use system h3 library instead of bundled | |
| <a name="use-internal-hdfs-library"></a>[`USE_INTERNAL_HDFS3_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/hdfs3.cmake#L14) | `ON` | Set to FALSE to use system HDFS3 instead of bundled (experimental - set to OFF on your own risk) | |
| <a name="use-internal-icu-library"></a>[`USE_INTERNAL_ICU_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/icu.cmake#L15) | `NOT_UNBUNDLED` | Set to FALSE to use system ICU library instead of bundled | |
| <a name="use-internal-ldap-library"></a>[`USE_INTERNAL_LDAP_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/ldap.cmake#L14) | `NOT_UNBUNDLED` | Set to FALSE to use system *LDAP library instead of bundled | |
| <a name="use-internal-libcxx-library"></a>[`USE_INTERNAL_LIBCXX_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/cxx.cmake#L15) | `USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT` | Disable to use system libcxx and libcxxabi libraries instead of bundled | |
| <a name="use-internal-libgsasl-library"></a>[`USE_INTERNAL_LIBGSASL_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/libgsasl.cmake#L12) | `USE_STATIC_LIBRARIES` | Set to FALSE to use system libgsasl library instead of bundled | when USE_STATIC_LIBRARIES we usually need to pick up hell a lot of dependencies for libgsasl |
| <a name="use-internal-libxml-library"></a>[`USE_INTERNAL_LIBXML2_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/libxml2.cmake#L1) | `NOT_UNBUNDLED` | Set to FALSE to use system libxml2 library instead of bundled | |
| <a name="use-internal-llvm-library"></a>[`USE_INTERNAL_LLVM_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/llvm.cmake#L8) | `NOT_UNBUNDLED` | Use bundled or system LLVM library. | |
| <a name="use-internal-msgpack-library"></a>[`USE_INTERNAL_MSGPACK_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/msgpack.cmake#L10) | `NOT_UNBUNDLED` | Set to FALSE to use system msgpack library instead of bundled | |
| <a name="use-internal-mysql-library"></a>[`USE_INTERNAL_MYSQL_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/mysqlclient.cmake#L15) | `NOT_UNBUNDLED` | Set to FALSE to use system mysqlclient library instead of bundled | |
| <a name="use-internal-odbc-library"></a>[`USE_INTERNAL_ODBC_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/odbc.cmake#L22) | `NOT_UNBUNDLED` | Use internal ODBC library | |
| <a name="use-internal-orc-library"></a>[`USE_INTERNAL_ORC_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/orc.cmake#L11) | `ON` | Set to FALSE to use system ORC instead of bundled (experimental set to OFF on your own risk) | |
| <a name="use-internal-parquet-library"></a>[`USE_INTERNAL_PARQUET_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/parquet.cmake#L16) | `NOT_UNBUNDLED` | Set to FALSE to use system parquet library instead of bundled | |
| <a name="use-internal-poco-library"></a>[`USE_INTERNAL_POCO_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/poco.cmake#L1) | `ON` | Use internal Poco library | |
| <a name="use-internal-protobuf-library"></a>[`USE_INTERNAL_PROTOBUF_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/protobuf.cmake#L14) | `NOT_UNBUNDLED` | Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk) | Normally we use the internal protobuf library. You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case. The external protobuf library can be installed in the system by running sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev |
| <a name="use-internal-rapidjson-library"></a>[`USE_INTERNAL_RAPIDJSON_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/rapidjson.cmake#L9) | `NOT_UNBUNDLED` | Set to FALSE to use system rapidjson library instead of bundled | |
| <a name="use-internal-rdkafka-library"></a>[`USE_INTERNAL_RDKAFKA_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/rdkafka.cmake#L10) | `NOT_UNBUNDLED` | Set to FALSE to use system librdkafka instead of the bundled | |
| <a name="use-internal-re-library"></a>[`USE_INTERNAL_RE2_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/re2.cmake#L1) | `NOT_UNBUNDLED` | Set to FALSE to use system re2 library instead of bundled [slower] | |
| <a name="use-internal-rocksdb-library"></a>[`USE_INTERNAL_ROCKSDB_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/rocksdb.cmake#L14) | `NOT_UNBUNDLED` | Set to FALSE to use system ROCKSDB library instead of bundled | |
| <a name="use-internal-snappy-library"></a>[`USE_INTERNAL_SNAPPY_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/snappy.cmake#L10) | `NOT_UNBUNDLED` | Set to FALSE to use system snappy library instead of bundled | |
| <a name="use-internal-sparsehash-library"></a>[`USE_INTERNAL_SPARSEHASH_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/sparsehash.cmake#L1) | `ON` | Set to FALSE to use system sparsehash library instead of bundled | |
| <a name="use-internal-ssl-library"></a>[`USE_INTERNAL_SSL_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/ssl.cmake#L12) | `NOT_UNBUNDLED` | Set to FALSE to use system *ssl library instead of bundled | |
| <a name="use-internal-xz-library"></a>[`USE_INTERNAL_XZ_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/xz.cmake#L1) | `NOT_UNBUNDLED` | Set to OFF to use system xz (lzma) library instead of bundled | |
| <a name="use-internal-zlib-library"></a>[`USE_INTERNAL_ZLIB_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/zlib.cmake#L1) | `NOT_UNBUNDLED` | Set to FALSE to use system zlib library instead of bundled | |
| <a name="use-internal-zstd-library"></a>[`USE_INTERNAL_ZSTD_LIBRARY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/zstd.cmake#L1) | `NOT_UNBUNDLED` | Set to FALSE to use system zstd library instead of bundled | |
### Other flags
| Name | Default value | Description | Comment |
|------|---------------|-------------|---------|
| <a name="add-gdb-index-for-gold"></a>[`ADD_GDB_INDEX_FOR_GOLD`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L209) | `OFF` | Add .gdb-index to resulting binaries for gold linker. | Ignored if `lld` is used |
| <a name="arch-native"></a>[`ARCH_NATIVE`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L260) | `OFF` | Add -march=native compiler flag | |
| <a name="clickhouse-split-binary"></a>[`CLICKHOUSE_SPLIT_BINARY`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L113) | `OFF` | Make several binaries (clickhouse-server, clickhouse-client etc.) instead of one bundled | |
| <a name="compiler-pipe"></a>[`COMPILER_PIPE`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L249) | `ON` | -pipe compiler option | Less `/tmp` usage, more RAM usage. |
| <a name="enable-check-heavy-builds"></a>[`ENABLE_CHECK_HEAVY_BUILDS`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L80) | `OFF` | Don't allow C++ translation units to compile too long or to take too much memory while compiling. | Take care to add prlimit in command line before ccache, or else ccache thinks that prlimit is compiler, and clang++ is its input file, and refuses to work with multiple inputs, e.g in ccache log: [2021-03-31T18:06:32.655327 36900] Command line: /usr/bin/ccache prlimit --as=10000000000 --data=5000000000 --cpu=600 /usr/bin/clang++-11 - ...... std=gnu++2a -MD -MT src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o -MF src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o.d -o src/CMakeFiles/dbms.dir/Storages/MergeTree/IMergeTreeDataPart.cpp.o -c ../src/Storages/MergeTree/IMergeTreeDataPart.cpp [2021-03-31T18:06:32.656704 36900] Multiple input files: /usr/bin/clang++-11 and ../src/Storages/MergeTree/IMergeTreeDataPart.cpp Another way would be to use --ccache-skip option before clang++-11 to make ccache ignore it. |
| <a name="enable-fuzzing"></a>[`ENABLE_FUZZING`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L130) | `OFF` | Fuzzy testing using libfuzzer | Implies `WITH_COVERAGE` |
| <a name="enable-libraries"></a>[`ENABLE_LIBRARIES`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L383) | `ON` | Enable all external libraries by default | Turns on all external libs like s3, kafka, ODBC, ... |
| <a name="enable-multitarget-code"></a>[`ENABLE_MULTITARGET_CODE`](https://github.com/clickhouse/clickhouse/blob/master/src/Functions/CMakeLists.txt#L102) | `ON` | Enable platform-dependent code | ClickHouse developers may use platform-dependent code under some macro (e.g. `ifdef ENABLE_MULTITARGET`). If turned ON, this option defines such macro. See `src/Functions/TargetSpecific.h` |
| <a name="enable-tests"></a>[`ENABLE_TESTS`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L169) | `ON` | Provide unit_test_dbms target with Google.Test unit tests | If turned `ON`, assumes the user has either the system GTest library or the bundled one. |
| <a name="enable-thinlto"></a>[`ENABLE_THINLTO`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L339) | `ON` | Clang-specific link time optimization | https://clang.llvm.org/docs/ThinLTO.html Applies to clang only. Disabled when building with tests or sanitizers. |
| <a name="fail-on-unsupported-options-combination"></a>[`FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L32) | `ON` | Stop/Fail CMake configuration if some ENABLE_XXX option is defined (either ON or OFF) but is not possible to satisfy | If turned off: e.g. when ENABLE_FOO is ON, but FOO tool was not found, the CMake will continue. |
| <a name="glibc-compatibility"></a>[`GLIBC_COMPATIBILITY`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L173) | `ON` | Enable compatibility with older glibc libraries. | Only for Linux, x86_64. |
| <a name="linker-name"></a>[`LINKER_NAME`](https://github.com/clickhouse/clickhouse/blob/master/cmake/tools.cmake#L44) | `OFF` | Linker name or full path | Example values: `lld-10`, `gold`. |
| <a name="llvm-has-rtti"></a>[`LLVM_HAS_RTTI`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/llvm.cmake#L40) | `ON` | Enable if LLVM was build with RTTI enabled | |
| <a name="make-static-libraries"></a>[`MAKE_STATIC_LIBRARIES`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L106) | `USE_STATIC_LIBRARIES` | Disable to make shared libraries | |
| <a name="parallel-compile-jobs"></a>[`PARALLEL_COMPILE_JOBS`](https://github.com/clickhouse/clickhouse/blob/master/cmake/limit_jobs.cmake#L10) | `""` | Maximum number of concurrent compilation jobs | 1 if not set |
| <a name="parallel-link-jobs"></a>[`PARALLEL_LINK_JOBS`](https://github.com/clickhouse/clickhouse/blob/master/cmake/limit_jobs.cmake#L13) | `""` | Maximum number of concurrent link jobs | 1 if not set |
| <a name="sanitize"></a>[`SANITIZE`](https://github.com/clickhouse/clickhouse/blob/master/cmake/sanitize.cmake#L7) | `""` | Enable one of the code sanitizers | Possible values: - `address` (ASan) - `memory` (MSan) - `thread` (TSan) - `undefined` (UBSan) - "" (no sanitizing) |
| <a name="split-shared-libraries"></a>[`SPLIT_SHARED_LIBRARIES`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L111) | `OFF` | Keep all internal libraries as separate .so files | DEVELOPER ONLY. Faster linking if turned on. |
| <a name="strip-debug-symbols-functions"></a>[`STRIP_DEBUG_SYMBOLS_FUNCTIONS`](https://github.com/clickhouse/clickhouse/blob/master/src/Functions/CMakeLists.txt#L51) | `STRIP_DSF_DEFAULT` | Do not generate debugger info for ClickHouse functions | Provides faster linking and lower binary size. Tradeoff is the inability to debug some source files with e.g. gdb (empty stack frames and no local variables)." |
| <a name="unbundled"></a>[`UNBUNDLED`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L389) | `OFF` | Use system libraries instead of ones in contrib/ | We recommend avoiding this mode for production builds because we can't guarantee all needed libraries exist in your system. This mode exists for enthusiastic developers who are searching for trouble. Useful for maintainers of OS packages. |
| <a name="use-include-what-you-use"></a>[`USE_INCLUDE_WHAT_YOU_USE`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L444) | `OFF` | Automatically reduce unneeded includes in source code (external tool) | https://github.com/include-what-you-use/include-what-you-use |
| <a name="use-libcxx"></a>[`USE_LIBCXX`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/cxx.cmake#L1) | `NOT_UNBUNDLED` | Use libc++ and libc++abi instead of libstdc++ | |
| <a name="use-sentry"></a>[`USE_SENTRY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/sentry.cmake#L13) | `ENABLE_LIBRARIES` | Use Sentry | |
| <a name="use-simdjson"></a>[`USE_SIMDJSON`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/simdjson.cmake#L1) | `ENABLE_LIBRARIES` | Use simdjson | |
| <a name="use-snappy"></a>[`USE_SNAPPY`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/snappy.cmake#L1) | `ENABLE_LIBRARIES` | Enable snappy library | |
| <a name="use-static-libraries"></a>[`USE_STATIC_LIBRARIES`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L105) | `ON` | Disable to use shared libraries | |
| <a name="use-unwind"></a>[`USE_UNWIND`](https://github.com/clickhouse/clickhouse/blob/master/cmake/find/unwind.cmake#L1) | `ENABLE_LIBRARIES` | Enable libunwind (better stacktraces) | |
| <a name="werror"></a>[`WERROR`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L399) | `OFF` | Enable -Werror compiler option | Using system libs can cause a lot of warnings in includes (on macro expansion). |
| <a name="weverything"></a>[`WEVERYTHING`](https://github.com/clickhouse/clickhouse/blob/master/cmake/warnings.cmake#L17) | `ON` | Enable -Weverything option with some exceptions. | Add some warnings that are not available even with -Wall -Wextra -Wpedantic. Intended for exploration of new compiler warnings that may be found useful. Applies to clang only |
| <a name="with-coverage"></a>[`WITH_COVERAGE`](https://github.com/clickhouse/clickhouse/blob/master/CMakeLists.txt#L300) | `OFF` | Profile the resulting binary/binaries | Compiler-specific coverage flags e.g. -fcoverage-mapping for gcc |
## Developer's guide for adding new CMake options
### Don't be obvious. Be informative.
Bad:
```cmake
option (ENABLE_TESTS "Enables testing" OFF)
```
This description is quite useless as it neither gives the viewer any additional information nor explains the option's purpose.
Better:
```cmake
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.test unit tests" OFF)
```
If the option's purpose can't be guessed by its name, or the purpose guess may be misleading, or option has some
pre-conditions, leave a comment above the `option()` line and explain what it does.
The best way would be linking the docs page (if it exists).
The comment is parsed into a separate column (see below).
Even better:
```cmake
# implies ${TESTS_ARE_ENABLED}
# see tests/CMakeLists.txt for implementation detail.
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.test unit tests" OFF)
```
### If the option's state could produce unwanted (or unusual) result, explicitly warn the user.
Suppose you have an option that may strip debug symbols from the ClickHouse's part.
This can speed up the linking process, but produces a binary that cannot be debugged.
In that case, prefer explicitly raising a warning telling the developer that they may be doing something wrong.
Also, such options should be disabled by default when applicable.
Bad:
```cmake
option(STRIP_DEBUG_SYMBOLS_FUNCTIONS
"Do not generate debugger info for ClickHouse functions.
${STRIP_DSF_DEFAULT})
if (STRIP_DEBUG_SYMBOLS_FUNCTIONS)
target_compile_options(clickhouse_functions PRIVATE "-g0")
endif()
```
Better:
```cmake
# Provides faster linking and lower binary size.
# Tradeoff is the inability to debug some source files with e.g. gdb
# (empty stack frames and no local variables)."
option(STRIP_DEBUG_SYMBOLS_FUNCTIONS
"Do not generate debugger info for ClickHouse functions."
${STRIP_DSF_DEFAULT})
if (STRIP_DEBUG_SYMBOLS_FUNCTIONS)
message(WARNING "Not generating debugger info for ClickHouse functions")
target_compile_options(clickhouse_functions PRIVATE "-g0")
endif()
```
### In the option's description, explain WHAT the option does rather than WHY it does something.
The WHY explanation should be placed in the comment.
You may find that the option's name is self-descriptive.
Bad:
```cmake
option(ENABLE_THINLTO "Enable Thin LTO. Only applicable for clang. It's also suppressed when building with tests or sanitizers." ON)
```
Better:
```cmake
# Only applicable for clang.
# Turned off when building with tests or sanitizers.
option(ENABLE_THINLTO "Clang-specific link time optimisation" ON).
```
### Don't assume other developers know as much as you do.
In ClickHouse, there are many tools used that an ordinary developer may not know. If you are in doubt, give a link to
the tool's docs. It won't take much of your time.
Bad:
```cmake
option(ENABLE_THINLTO "Enable Thin LTO. Only applicable for clang. It's also suppressed when building with tests or sanitizers." ON)
```
Better (combined with the above hint):
```cmake
# https://clang.llvm.org/docs/ThinLTO.html
# Only applicable for clang.
# Turned off when building with tests or sanitizers.
option(ENABLE_THINLTO "Clang-specific link time optimisation" ON).
```
Other example, bad:
```cmake
option (USE_INCLUDE_WHAT_YOU_USE "Use 'include-what-you-use' tool" OFF)
```
Better:
```cmake
# https://github.com/include-what-you-use/include-what-you-use
option (USE_INCLUDE_WHAT_YOU_USE "Reduce unneeded #include s (external tool)" OFF)
```
### Prefer consistent default values.
CMake allows you to pass a plethora of values representing boolean `true/false`, e.g. `1, ON, YES, ...`.
Prefer the `ON/OFF` values, if possible.

View File

@ -2864,6 +2864,50 @@ Possible values:
Default value: `0`.
## prefer_column_name_to_alias {#prefer-column-name-to-alias}
Enables or disables using the original column names instead of aliases in query expressions and clauses. It especially matters when alias is the same as the column name, see [Expression Aliases](../../sql-reference/syntax.md#notes-on-usage). Enable this setting to make aliases syntax rules in ClickHouse more compatible with most other database engines.
Possible values:
- 0 — The column name is substituted with the alias.
- 1 — The column name is not substituted with the alias.
Default value: `0`.
**Example**
The difference between enabled and disabled:
Query:
```sql
SET prefer_column_name_to_alias = 0;
SELECT avg(number) AS number, max(number) FROM numbers(10);
```
Result:
```text
Received exception from server (version 21.5.1):
Code: 184. DB::Exception: Received from localhost:9000. DB::Exception: Aggregate function avg(number) is found inside another aggregate function in query: While processing avg(number) AS number.
```
Query:
```sql
SET prefer_column_name_to_alias = 1;
SELECT avg(number) AS number, max(number) FROM numbers(10);
```
Result:
```text
┌─number─┬─max(number)─┐
│ 4.5 │ 9 │
└────────┴─────────────┘
```
## limit {#limit}
Sets the maximum number of rows to get from the query result. It adjusts the value set by the [LIMIT](../../sql-reference/statements/select/limit.md#limit-clause) clause, so that the limit, specified in the query, cannot exceed the limit, set by this setting.

View File

@ -4,4 +4,21 @@ toc_priority: 3
# max {#agg_function-max}
Calculates the maximum.
Aggregate function that calculates the maximum across a group of values.
Example:
```
SELECT max(salary) FROM employees;
```
```
SELECT department, max(salary) FROM employees GROUP BY department;
```
If you need a non-aggregate function to choose the maximum of two values, see `greatest`:
```
SELECT greatest(a, b) FROM table;
```

View File

@ -4,4 +4,20 @@ toc_priority: 2
## min {#agg_function-min}
Calculates the minimum.
Aggregate function that calculates the minimum across a group of values.
Example:
```
SELECT min(salary) FROM employees;
```
```
SELECT department, min(salary) FROM employees GROUP BY department;
```
If you need a non-aggregate function to choose the minimum of two values, see `least`:
```
SELECT least(a, b) FROM table;
```

View File

@ -23,7 +23,7 @@ The point in time is saved as a [Unix timestamp](https://en.wikipedia.org/wiki/U
Timezone agnostic unix timestamp is stored in tables, and the timezone is used to transform it to text format or back during data import/export or to make calendar calculations on the values (example: `toDate`, `toHour` functions et cetera). The time zone is not stored in the rows of the table (or in resultset), but is stored in the column metadata.
A list of supported time zones can be found in the [IANA Time Zone Database](https://www.iana.org/time-zones) and also can be queried by `SELECT * FROM system.time_zones`.
A list of supported time zones can be found in the [IANA Time Zone Database](https://www.iana.org/time-zones) and also can be queried by `SELECT * FROM system.time_zones`. [The list](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) is also available at Wikipedia.
You can explicitly set a time zone for `DateTime`-type columns when creating a table. Example: `DateTime('UTC')`. If the time zone isnt set, ClickHouse uses the value of the [timezone](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-timezone) parameter in the server settings or the operating system settings at the moment of the ClickHouse server start.

View File

@ -134,6 +134,46 @@ Result:
└───────────────────┘
```
## timeZoneOffset {#timezoneoffset}
Returns a timezone offset in seconds from [UTC](https://en.wikipedia.org/wiki/Coordinated_Universal_Time). The function takes into account [daylight saving time](https://en.wikipedia.org/wiki/Daylight_saving_time) and historical timezone changes at the specified date and time.
[IANA timezone database](https://www.iana.org/time-zones) is used to calculate the offset.
**Syntax**
``` sql
timeZoneOffset(value)
```
Alias: `timezoneOffset`.
**Arguments**
- `value` — Date and time. [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
**Returned value**
- Offset from UTC in seconds.
Type: [Int32](../../sql-reference/data-types/int-uint.md).
**Example**
Query:
``` sql
SELECT toDateTime('2021-04-21 10:20:30', 'America/New_York') AS Time, toTypeName(Time) AS Type,
timeZoneOffset(Time) AS Offset_in_seconds, (Offset_in_seconds / 3600) AS Offset_in_hours;
```
Result:
``` text
┌────────────────Time─┬─Type─────────────────────────┬─Offset_in_seconds─┬─Offset_in_hours─┐
│ 2021-04-21 10:20:30 │ DateTime('America/New_York') │ -14400 │ -4 │
└─────────────────────┴──────────────────────────────┴───────────────────┴─────────────────┘
```
## toYear {#toyear}
Converts a date or date with time to a UInt16 number containing the year number (AD).

View File

@ -171,7 +171,7 @@ Received exception from server (version 18.14.17):
Code: 184. DB::Exception: Received from localhost:9000, 127.0.0.1. DB::Exception: Aggregate function sum(b) is found inside another aggregate function in query.
```
In this example, we declared table `t` with column `b`. Then, when selecting data, we defined the `sum(b) AS b` alias. As aliases are global, ClickHouse substituted the literal `b` in the expression `argMax(a, b)` with the expression `sum(b)`. This substitution caused the exception.
In this example, we declared table `t` with column `b`. Then, when selecting data, we defined the `sum(b) AS b` alias. As aliases are global, ClickHouse substituted the literal `b` in the expression `argMax(a, b)` with the expression `sum(b)`. This substitution caused the exception. You can change this default behavior by setting [prefer_column_name_to_alias](../operations/settings/settings.md#prefer_column_name_to_alias) to `1`.
## Asterisk {#asterisk}

View File

@ -2755,6 +2755,50 @@ SELECT * FROM test2;
Значение по умолчанию: `0`.
## prefer_column_name_to_alias {#prefer-column-name-to-alias}
Включает или отключает замену названий столбцов на синонимы в выражениях и секциях запросов, см. [Примечания по использованию синонимов](../../sql-reference/syntax.md#syntax-expression_aliases). Включите эту настройку, чтобы синтаксис синонимов в ClickHouse был более совместим с большинством других СУБД.
Возможные значения:
- 0 — синоним подставляется вместо имени столбца.
- 1 — синоним не подставляется вместо имени столбца.
Значение по умолчанию: `0`.
**Пример**
Какие изменения привносит включение и выключение настройки:
Запрос:
```sql
SET prefer_column_name_to_alias = 0;
SELECT avg(number) AS number, max(number) FROM numbers(10);
```
Результат:
```text
Received exception from server (version 21.5.1):
Code: 184. DB::Exception: Received from localhost:9000. DB::Exception: Aggregate function avg(number) is found inside another aggregate function in query: While processing avg(number) AS number.
```
Запрос:
```sql
SET prefer_column_name_to_alias = 1;
SELECT avg(number) AS number, max(number) FROM numbers(10);
```
Результат:
```text
┌─number─┬─max(number)─┐
│ 4.5 │ 9 │
└────────┴─────────────┘
```
## limit {#limit}
Устанавливает максимальное количество строк, возвращаемых запросом. Ограничивает сверху значение, установленное в запросе в секции [LIMIT](../../sql-reference/statements/select/limit.md#limit-clause).

View File

@ -1,8 +0,0 @@
---
toc_priority: 3
---
# max {#agg_function-max}
Вычисляет максимум.

View File

@ -0,0 +1 @@
../../../../en/sql-reference/aggregate-functions/reference/max.md

View File

@ -1,8 +0,0 @@
---
toc_priority: 2
---
## min {#agg_function-min}
Вычисляет минимум.

View File

@ -0,0 +1 @@
../../../../en/sql-reference/aggregate-functions/reference/min.md

View File

@ -20,8 +20,7 @@ DateTime([timezone])
## Использование {#ispolzovanie}
Момент времени сохраняется как [Unix timestamp](https://ru.wikipedia.org/wiki/Unix-%D0%B2%D1%80%D0%B5%D0%BC%D1%8F), независимо от часового пояса и переходов на летнее/зимнее время. Дополнительно, тип `DateTime` позволяет хранить часовой пояс, единый для всей колонки, который влияет на то, как будут отображаться значения типа `DateTime` в текстовом виде и как будут парситься значения заданные в виде строк (2020-01-01 05:00:01). Часовой пояс не хранится в строках таблицы (выборки), а хранится в метаданных колонки.
Список поддерживаемых временных зон можно найти в [IANA Time Zone Database](https://www.iana.org/time-zones).
Пакет `tzdata`, содержащий [базу данных часовых поясов IANA](https://www.iana.org/time-zones), должен быть установлен в системе. Используйте команду `timedatectl list-timezones` для получения списка часовых поясов, известных локальной системе.
Список поддерживаемых часовых поясов можно найти в [IANA Time Zone Database](https://www.iana.org/time-zones) или получить из базы данных, выполнив запрос `SELECT * FROM system.time_zones`. Также [список](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) есть в Википедии.
Часовой пояс для столбца типа `DateTime` можно в явном виде установить при создании таблицы. Если часовой пояс не установлен, то ClickHouse использует значение параметра [timezone](../../sql-reference/data-types/datetime.md#server_configuration_parameters-timezone), установленное в конфигурации сервера или в настройках операционной системы на момент запуска сервера.

View File

@ -134,6 +134,46 @@ SELECT timezoneOf(now());
└───────────────────┘
```
## timeZoneOffset {#timezoneoffset}
Возвращает смещение часового пояса в секундах от [UTC](https://ru.wikipedia.org/wiki/Всемирное_координированное_время). Функция учитывает [летнее время](https://ru.wikipedia.org/wiki/Летнее_время) и исторические изменения часовых поясов, которые действовали на указанную дату.
Для вычисления смещения используется информация из [базы данных IANA](https://www.iana.org/time-zones).
**Синтаксис**
``` sql
timeZoneOffset(value)
```
Псевдоним: `timezoneOffset`.
**Аргументы**
- `value` — Дата с временем. [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
**Возвращаемое значение**
- Смещение в секундах от UTC.
Тип: [Int32](../../sql-reference/data-types/int-uint.md).
**Пример**
Запрос:
``` sql
SELECT toDateTime('2021-04-21 10:20:30', 'Europe/Moscow') AS Time, toTypeName(Time) AS Type,
timeZoneOffset(Time) AS Offset_in_seconds, (Offset_in_seconds / 3600) AS Offset_in_hours;
```
Результат:
``` text
┌────────────────Time─┬─Type──────────────────────┬─Offset_in_seconds─┬─Offset_in_hours─┐
│ 2021-04-21 10:20:30 │ DateTime('Europe/Moscow') │ 10800 │ 3 │
└─────────────────────┴───────────────────────────┴───────────────────┴─────────────────┘
```
## toYear {#toyear}
Переводит дату или дату-с-временем в число типа UInt16, содержащее номер года (AD).

View File

@ -128,7 +128,7 @@ expr AS alias
Например, `SELECT table_name_alias.column_name FROM table_name table_name_alias`.
В функции [CAST](sql_reference/syntax.md#type_conversion_function-cast), ключевое слово `AS` имеет другое значение. Смотрите описание функции.
В функции [CAST](../sql_reference/syntax.md#type_conversion_function-cast), ключевое слово `AS` имеет другое значение. Смотрите описание функции.
- `expr` — любое выражение, которое поддерживает ClickHouse.
@ -138,7 +138,7 @@ expr AS alias
Например, `SELECT "table t".column_name FROM table_name AS "table t"`.
### Примечания по использованию {#primechaniia-po-ispolzovaniiu}
### Примечания по использованию {#notes-on-usage}
Синонимы являются глобальными для запроса или подзапроса, и вы можете определить синоним в любой части запроса для любого выражения. Например, `SELECT (1 AS n) + 2, n`.
@ -169,9 +169,9 @@ Received exception from server (version 18.14.17):
Code: 184. DB::Exception: Received from localhost:9000, 127.0.0.1. DB::Exception: Aggregate function sum(b) is found inside another aggregate function in query.
```
В этом примере мы объявили таблицу `t` со столбцом `b`. Затем, при выборе данных, мы определили синоним `sum(b) AS b`. Поскольку синонимы глобальные, то ClickHouse заменил литерал `b` в выражении `argMax(a, b)` выражением `sum(b)`. Эта замена вызвала исключение.
В этом примере мы объявили таблицу `t` со столбцом `b`. Затем, при выборе данных, мы определили синоним `sum(b) AS b`. Поскольку синонимы глобальные, то ClickHouse заменил литерал `b` в выражении `argMax(a, b)` выражением `sum(b)`. Эта замена вызвала исключение. Можно изменить это поведение, включив настройку [prefer_column_name_to_alias](../operations/settings/settings.md#prefer_column_name_to_alias), для этого нужно установить ее в значение `1`.
## Звёздочка {#zviozdochka}
## Звёздочка {#asterisk}
В запросе `SELECT`, вместо выражения может стоять звёздочка. Подробнее смотрите раздел «SELECT».
@ -180,4 +180,3 @@ Code: 184. DB::Exception: Received from localhost:9000, 127.0.0.1. DB::Exception
Выражение представляет собой функцию, идентификатор, литерал, применение оператора, выражение в скобках, подзапрос, звёздочку. А также может содержать синоним.
Список выражений - одно выражение или несколько выражений через запятую.
Функции и операторы, в свою очередь, в качестве аргументов, могут иметь произвольные выражения.

View File

@ -113,6 +113,16 @@ public:
nested_func->merge(place, rhs, arena);
}
/// Merges a batch of aggregation states by delegating directly to the nested
/// aggregate function: for each row i, the state at `places[i] + place_offset`
/// absorbs the state `rhs[i]`. `arena` is forwarded for any allocations the
/// nested merge needs.
void mergeBatch(
    size_t batch_size,
    AggregateDataPtr * places,
    size_t place_offset,
    const AggregateDataPtr * rhs,
    Arena * arena) const override
{
    nested_func->mergeBatch(batch_size, places, place_offset, rhs, arena);
}
void serialize(ConstAggregateDataPtr __restrict place, WriteBuffer & buf) const override
{
nested_func->serialize(place, buf);

View File

@ -196,6 +196,18 @@ public:
place[size_of_data] |= rhs[size_of_data];
}
/// Merges a batch of states: first merges the nested states, then ORs the
/// trailing byte stored at offset `size_of_data` in each state
/// (presumably a "has value"/null flag maintained by the enclosing
/// combinator — confirm against the enclosing class).
void mergeBatch(
    size_t batch_size,
    AggregateDataPtr * places,
    size_t place_offset,
    const AggregateDataPtr * rhs,
    Arena * arena) const override
{
    nested_function->mergeBatch(batch_size, places, place_offset, rhs, arena);

    /// A destination state becomes "set" if either side was set.
    for (size_t i = 0; i < batch_size; ++i)
        (places[i] + place_offset)[size_of_data] |= rhs[i][size_of_data];
}
void serialize(
ConstAggregateDataPtr place,
WriteBuffer & buf) const override

View File

@ -24,6 +24,6 @@ list(REMOVE_ITEM clickhouse_aggregate_functions_headers
add_library(clickhouse_aggregate_functions ${clickhouse_aggregate_functions_sources})
target_link_libraries(clickhouse_aggregate_functions PRIVATE dbms PUBLIC ${CITYHASH_LIBRARIES})
if(ENABLE_TESTS)
add_subdirectory(tests)
if(ENABLE_EXAMPLES)
add_subdirectory(examples)
endif()

View File

@ -153,6 +153,13 @@ public:
Arena * arena,
ssize_t if_argument_pos = -1) const = 0;
virtual void mergeBatch(
size_t batch_size,
AggregateDataPtr * places,
size_t place_offset,
const AggregateDataPtr * rhs,
Arena * arena) const = 0;
/** The same for single place.
*/
virtual void addBatchSinglePlace(
@ -279,6 +286,18 @@ public:
}
}
/// Default row-by-row implementation of batched merge for the CRTP helper:
/// calls the Derived class's merge() once per row, skipping rows whose
/// destination place pointer is null (such rows have no state to merge into).
void mergeBatch(
    size_t batch_size,
    AggregateDataPtr * places,
    size_t place_offset,
    const AggregateDataPtr * rhs,
    Arena * arena) const override
{
    for (size_t i = 0; i < batch_size; ++i)
        if (places[i])
            static_cast<const Derived *>(this)->merge(places[i] + place_offset, rhs[i], arena);
}
void addBatchSinglePlace(
size_t batch_size, AggregateDataPtr place, const IColumn ** columns, Arena * arena, ssize_t if_argument_pos = -1) const override
{

View File

@ -11,6 +11,11 @@
namespace DB
{
namespace ErrorCodes
{
extern const int BAD_ARGUMENTS;
}
template <typename F>
static Float64 integrateSimpson(Float64 a, Float64 b, F && func)
{
@ -48,6 +53,11 @@ std::pair<RanksArray, Float64> computeRanksAndTieCorrection(const Values & value
++right;
auto adjusted = (left + right + 1.) / 2.;
auto count_equal = right - left;
/// Scipy implementation throws exception in this case too.
if (count_equal == size)
throw Exception("All numbers in both samples are identical", ErrorCodes::BAD_ARGUMENTS);
tie_numenator += std::pow(count_equal, 3) - count_equal;
for (size_t iter = left; iter < right; ++iter)
out[indexes[iter]] = adjusted;

View File

@ -1 +1,3 @@
add_subdirectory(tests)
if (ENABLE_EXAMPLES)
add_subdirectory(examples)
endif()

View File

@ -1,3 +1,3 @@
if (ENABLE_TESTS)
add_subdirectory (tests)
if (ENABLE_EXAMPLES)
add_subdirectory (examples)
endif ()

View File

@ -3,6 +3,6 @@ add_subdirectory(StringUtils)
#add_subdirectory(ZooKeeper)
#add_subdirectory(ConfigProcessor)
if (ENABLE_TESTS)
add_subdirectory (tests)
endif ()
if (ENABLE_EXAMPLES)
add_subdirectory(examples)
endif()

View File

@ -0,0 +1,99 @@
#include <Common/JSONBuilder.h>
#include <IO/WriteHelpers.h>
#include <Common/typeid_cast.h>
namespace DB::JSONBuilder
{
static bool isArrayOrMap(const IItem & item)
{
return typeid_cast<const JSONArray *>(&item) || typeid_cast<const JSONMap *>(&item);
}
static bool isSimpleArray(const std::vector<ItemPtr> & values)
{
for (const auto & value : values)
if (isArrayOrMap(*value))
return false;
return true;
}
/// Serializes the stored string as a quoted, escaped JSON string literal,
/// using the escaping rules from the nested DB::FormatSettings.
void JSONString::format(const FormatSettings & settings, FormatContext & context)
{
    writeJSONString(value, context.out, settings.settings);
}
/// Serializes the boolean as the bare JSON literal `true` or `false`.
/// Format settings are not needed for booleans.
void JSONBool::format(const FormatSettings &, FormatContext & context)
{
    if (value)
        writeString("true", context.out);
    else
        writeString("false", context.out);
}
/// Serializes the array as `[...]`.
/// If all elements are scalars and settings.print_simple_arrays_in_single_row
/// is set, elements are printed on one line separated by ", "; otherwise each
/// element goes on its own line, indented one level deeper than the brackets.
void JSONArray::format(const FormatSettings & settings, FormatContext & context)
{
    writeChar('[', context.out);

    /// Children render one indent level deeper.
    context.offset += settings.indent;

    bool single_row = settings.print_simple_arrays_in_single_row && isSimpleArray(values);
    bool first = true;

    for (const auto & value : values)
    {
        if (!first)
            writeChar(',', context.out);

        if (!single_row)
        {
            /// Multiline mode: every element starts on a freshly indented line.
            writeChar('\n', context.out);
            writeChar(' ', context.offset, context.out);
        }
        else if (!first)
            writeChar(' ', context.out);  /// Single-row mode: ", " between elements.

        first = false;
        value->format(settings, context);
    }

    /// Restore the caller's indentation before closing the bracket.
    context.offset -= settings.indent;

    if (!single_row)
    {
        writeChar('\n', context.out);
        writeChar(' ', context.offset, context.out);
    }

    writeChar(']', context.out);
}
/// Serializes the map as a JSON object `{...}`: one `"key": value` pair per
/// line, indented one level deeper than the braces. Note that the object is
/// always rendered multiline (a newline is written even before `}` when the
/// map is empty).
void JSONMap::format(const FormatSettings & settings, FormatContext & context)
{
    writeChar('{', context.out);

    /// Pairs render one indent level deeper.
    context.offset += settings.indent;

    bool first = true;

    for (const auto & value : values)
    {
        if (!first)
            writeChar(',', context.out);
        first = false;

        /// Each pair starts on a freshly indented line.
        writeChar('\n', context.out);
        writeChar(' ', context.offset, context.out);

        /// Keys are emitted as escaped JSON strings, followed by ": ".
        writeJSONString(value.key, context.out, settings.settings);
        writeChar(':', context.out);
        writeChar(' ', context.out);

        value.value->format(settings, context);
    }

    /// Restore the caller's indentation before closing the brace.
    context.offset -= settings.indent;

    writeChar('\n', context.out);
    writeChar(' ', context.offset, context.out);
    writeChar('}', context.out);
}
}

111
src/Common/JSONBuilder.h Normal file
View File

@ -0,0 +1,111 @@
#pragma once
#include <type_traits>
#include <vector>
#include <IO/WriteBuffer.h>
#include <Formats/FormatSettings.h>
#include <IO/WriteHelpers.h>
namespace DB::JSONBuilder
{
/// Options controlling how the JSON tree is rendered.
struct FormatSettings
{
    /// Low-level output settings (string escaping rules, etc.) from DB.
    const DB::FormatSettings & settings;
    /// Number of spaces added per nesting level.
    size_t indent = 2;
    /// If true, arrays containing only scalar leaves are printed on one line.
    bool print_simple_arrays_in_single_row = true;
};
/// Mutable state threaded through the recursive format() calls.
struct FormatContext
{
    /// Destination buffer the JSON text is written to.
    WriteBuffer & out;
    /// Current indentation offset in spaces; grows/shrinks with nesting.
    size_t offset = 0;
};
/// Base class of all JSON tree nodes (strings, numbers, booleans, arrays, maps).
class IItem
{
public:
    virtual ~IItem() = default;
    /// Writes this node (and, for containers, its children) to context.out.
    virtual void format(const FormatSettings & settings, FormatContext & context) = 0;
};

/// Nodes are uniquely owned by their parent container (or by the caller for the root).
using ItemPtr = std::unique_ptr<IItem>;
/// Leaf node holding a string; rendered as a quoted, escaped JSON string literal.
class JSONString : public IItem
{
public:
    explicit JSONString(std::string value_) : value(std::move(value_)) {}
    void format(const FormatSettings & settings, FormatContext & context) override;

private:
    std::string value;
};
/// Leaf node holding a numeric value of any arithmetic type.
template <typename T>
class JSONNumber : public IItem
{
public:
    explicit JSONNumber(T value_) : value(value_)
    {
        /// Reject non-numeric instantiations at compile time.
        static_assert(std::is_arithmetic_v<T>, "JSONNumber support only numeric types");
    }

    /// Writes the number using the JSON numeric formatting rules from settings.
    void format(const FormatSettings & settings, FormatContext & context) override
    {
        writeJSONNumber(value, context.out, settings.settings);
    }

private:
    T value;
};
/// Leaf node holding a boolean; rendered as the bare literal `true` or `false`.
class JSONBool : public IItem
{
public:
    /// bool is trivially copyable, so a plain copy is used (std::move would be a no-op).
    explicit JSONBool(bool value_) : value(value_) {}
    void format(const FormatSettings & settings, FormatContext & context) override;

private:
    bool value;
};
class JSONArray : public IItem
{
public:
void add(ItemPtr value) { values.push_back(std::move(value)); }
void add(std::string value) { add(std::make_unique<JSONString>(std::move(value))); }
void add(const char * value) { add(std::make_unique<JSONString>(value)); }
void add(bool value) { add(std::make_unique<JSONBool>(std::move(value))); }
template <typename T, std::enable_if_t<std::is_arithmetic<T>::value, bool> = true>
void add(T value) { add(std::make_unique<JSONNumber<T>>(value)); }
void format(const FormatSettings & settings, FormatContext & context) override;
private:
std::vector<ItemPtr> values;
};
class JSONMap : public IItem
{
struct Pair
{
std::string key;
ItemPtr value;
};
public:
void add(std::string key, ItemPtr value) { values.emplace_back(Pair{.key = std::move(key), .value = std::move(value)}); }
void add(std::string key, std::string value) { add(std::move(key), std::make_unique<JSONString>(std::move(value))); }
void add(std::string key, const char * value) { add(std::move(key), std::make_unique<JSONString>(value)); }
void add(std::string key, bool value) { add(std::move(key), std::make_unique<JSONBool>(std::move(value))); }
template <typename T, std::enable_if_t<std::is_arithmetic<T>::value, bool> = true>
void add(std::string key, T value) { add(std::move(key), std::make_unique<JSONNumber<T>>(value)); }
void format(const FormatSettings & settings, FormatContext & context) override;
private:
std::vector<Pair> values;
};
}

View File

@ -1,5 +1,5 @@
#include <Common/StatusInfo.h>
#include <Interpreters/ExternalLoader.h>
#include <Common/ExternalLoaderStatus.h>
/// Available status. Add something here as you wish.
#define APPLY_FOR_STATUS(M) \

View File

@ -205,6 +205,13 @@ size_t ThreadPoolImpl<Thread>::active() const
return scheduled_jobs;
}
/// Returns true if the pool has been terminated (shutdown flag set);
/// after that, any further scheduling will fail.
template <typename Thread>
bool ThreadPoolImpl<Thread>::finished() const
{
    /// lock_guard is sufficient here: the lock is held for the whole scope
    /// and never released early or handed to a condition variable, so the
    /// extra state of unique_lock is unnecessary.
    std::lock_guard lock(mutex);
    return shutdown;
}
template <typename Thread>
void ThreadPoolImpl<Thread>::worker(typename std::list<Thread>::iterator thread_it)
{

View File

@ -67,6 +67,10 @@ public:
/// Returns number of running and scheduled jobs.
size_t active() const;
/// Returns true if the pool already terminated
/// (and any further scheduling will produce CANNOT_SCHEDULE_TASK exception)
bool finished() const;
void setMaxThreads(size_t value);
void setMaxFreeThreads(size_t value);
void setQueueSize(size_t value);

View File

@ -6,6 +6,6 @@ add_library(clickhouse_common_zookeeper ${clickhouse_common_zookeeper_headers} $
target_link_libraries (clickhouse_common_zookeeper PUBLIC clickhouse_common_io common PRIVATE string_utils)
if (ENABLE_TESTS)
add_subdirectory (tests)
endif ()
if (ENABLE_EXAMPLES)
add_subdirectory(examples)
endif()

View File

@ -35,9 +35,6 @@ target_include_directories(radix_sort SYSTEM PRIVATE ${PDQSORT_INCLUDE_DIR})
add_executable (arena_with_free_lists arena_with_free_lists.cpp)
target_link_libraries (arena_with_free_lists PRIVATE dbms)
add_executable (pod_array pod_array.cpp)
target_link_libraries (pod_array PRIVATE clickhouse_common_io)
add_executable (lru_hash_map_perf lru_hash_map_perf.cpp)
target_link_libraries (lru_hash_map_perf PRIVATE clickhouse_common_io)

View File

@ -4,6 +4,419 @@
using namespace DB;
/// Checks PODArray move-assignment across the interesting regimes of
/// AllocatorWithStackMemory: empty arrays, arrays fitting in the inline stack
/// buffer, and arrays that have grown past it (initial_bytes = 32, so
/// presumably up to 4 UInt64 values fit inline — confirm against PODArray's
/// padding rules). Contents must survive moves in both directions.
TEST(Common, PODArrayBasicMove)
{
    using namespace DB;

    static constexpr size_t initial_bytes = 32;
    using Array = PODArray<UInt64, initial_bytes,
        AllocatorWithStackMemory<Allocator<false>, initial_bytes>>;

    {
        /// Moving one empty array into another must be well-formed.
        Array arr;
        Array arr2;
        arr2 = std::move(arr);
    }

    {
        /// Small (inline-buffer-sized) array moved forth and back;
        /// the three elements must survive both moves.
        Array arr;

        arr.push_back(1);
        arr.push_back(2);
        arr.push_back(3);

        Array arr2;

        arr2 = std::move(arr);

        ASSERT_EQ(arr2.size(), 3);
        ASSERT_EQ(arr2[0], 1);
        ASSERT_EQ(arr2[1], 2);
        ASSERT_EQ(arr2[2], 3);

        arr = std::move(arr2);

        ASSERT_EQ(arr.size(), 3);
        ASSERT_EQ(arr[0], 1);
        ASSERT_EQ(arr[1], 2);
        ASSERT_EQ(arr[2], 3);
    }

    {
        /// Five elements exceed the inline buffer, forcing heap storage;
        /// moving must transfer the heap buffer intact, both ways.
        Array arr;

        arr.push_back(1);
        arr.push_back(2);
        arr.push_back(3);
        arr.push_back(4);
        arr.push_back(5);

        Array arr2;

        arr2 = std::move(arr);

        ASSERT_EQ(arr2.size(), 5);
        ASSERT_EQ(arr2[0], 1);
        ASSERT_EQ(arr2[1], 2);
        ASSERT_EQ(arr2[2], 3);
        ASSERT_EQ(arr2[3], 4);
        ASSERT_EQ(arr2[4], 5);

        arr = std::move(arr2);

        ASSERT_EQ(arr.size(), 5);
        ASSERT_EQ(arr[0], 1);
        ASSERT_EQ(arr[1], 2);
        ASSERT_EQ(arr[2], 3);
        ASSERT_EQ(arr[3], 4);
        ASSERT_EQ(arr[4], 5);
    }

    {
        /// Move-assign onto a non-empty destination (both inline-sized):
        /// the destination's previous contents must be fully replaced.
        Array arr;

        arr.push_back(1);
        arr.push_back(2);
        arr.push_back(3);

        Array arr2;

        arr2.push_back(4);
        arr2.push_back(5);
        arr2.push_back(6);
        arr2.push_back(7);

        arr2 = std::move(arr);

        ASSERT_EQ(arr2.size(), 3);
        ASSERT_EQ(arr2[0], 1);
        ASSERT_EQ(arr2[1], 2);
        ASSERT_EQ(arr2[2], 3);
    }

    {
        /// Heap-backed source moved onto an inline-sized destination:
        /// the destination must take over the heap contents.
        Array arr;

        arr.push_back(1);
        arr.push_back(2);
        arr.push_back(3);

        Array arr2;

        arr2.push_back(4);
        arr2.push_back(5);
        arr2.push_back(6);
        arr2.push_back(7);
        arr2.push_back(8);

        arr = std::move(arr2);

        ASSERT_EQ(arr.size(), 5);
        ASSERT_EQ(arr[0], 4);
        ASSERT_EQ(arr[1], 5);
        ASSERT_EQ(arr[2], 6);
        ASSERT_EQ(arr[3], 7);
        ASSERT_EQ(arr[4], 8);
    }
}
/// swap() of PODArray with inline (stack) storage: every combination of
/// empty/short/long operands must exchange contents exactly, and a second
/// swap must restore the original state.
TEST(Common, PODArrayBasicSwap)
{
    using namespace DB;

    static constexpr size_t initial_bytes = 32;
    using Array = PODArray<UInt64, initial_bytes,
        AllocatorWithStackMemory<Allocator<false>, initial_bytes>>;

    /// Append every value from `values` to `dst`.
    auto fill = [](Array & dst, std::initializer_list<UInt64> values)
    {
        for (UInt64 value : values)
            dst.push_back(value);
    };

    /// Assert that `arr` holds exactly `expected` (empty list checks emptiness).
    auto expect = [](const Array & arr, std::initializer_list<UInt64> expected)
    {
        ASSERT_TRUE(arr.size() == expected.size());
        size_t pos = 0;
        for (UInt64 value : expected)
        {
            ASSERT_TRUE(arr[pos] == value);
            ++pos;
        }
    };

    {
        /// Swapping two empty arrays, both directions.
        Array lhs;
        Array rhs;
        lhs.swap(rhs);
        rhs.swap(lhs);
    }
    {
        /// Empty vs. short array.
        Array lhs;
        Array rhs;
        fill(rhs, {1, 2, 3});
        lhs.swap(rhs);
        expect(lhs, {1, 2, 3});
        expect(rhs, {});
        lhs.swap(rhs);
        expect(lhs, {});
        expect(rhs, {1, 2, 3});
    }
    {
        /// Empty vs. longer array.
        Array lhs;
        Array rhs;
        fill(rhs, {1, 2, 3, 4, 5});
        lhs.swap(rhs);
        expect(lhs, {1, 2, 3, 4, 5});
        expect(rhs, {});
        lhs.swap(rhs);
        expect(lhs, {});
        expect(rhs, {1, 2, 3, 4, 5});
    }
    {
        /// Two arrays of equal (short) size.
        Array lhs;
        fill(lhs, {1, 2, 3});
        Array rhs;
        fill(rhs, {4, 5, 6});
        lhs.swap(rhs);
        expect(lhs, {4, 5, 6});
        expect(rhs, {1, 2, 3});
        lhs.swap(rhs);
        expect(lhs, {1, 2, 3});
        expect(rhs, {4, 5, 6});
    }
    {
        /// Two non-empty arrays of different sizes.
        Array lhs;
        fill(lhs, {1, 2});
        Array rhs;
        fill(rhs, {3, 4, 5});
        lhs.swap(rhs);
        expect(lhs, {3, 4, 5});
        expect(rhs, {1, 2});
        lhs.swap(rhs);
        expect(lhs, {1, 2});
        expect(rhs, {3, 4, 5});
    }
    {
        /// Short array vs. one that exceeds the inline capacity.
        Array lhs;
        fill(lhs, {1, 2, 3});
        Array rhs;
        fill(rhs, {4, 5, 6, 7, 8});
        lhs.swap(rhs);
        expect(lhs, {4, 5, 6, 7, 8});
        expect(rhs, {1, 2, 3});
        lhs.swap(rhs);
        expect(lhs, {1, 2, 3});
        expect(rhs, {4, 5, 6, 7, 8});
    }
    {
        /// Two long arrays of equal size.
        Array lhs;
        fill(lhs, {1, 2, 3, 4, 5});
        Array rhs;
        fill(rhs, {6, 7, 8, 9, 10});
        lhs.swap(rhs);
        expect(lhs, {6, 7, 8, 9, 10});
        expect(rhs, {1, 2, 3, 4, 5});
        lhs.swap(rhs);
        expect(lhs, {1, 2, 3, 4, 5});
        expect(rhs, {6, 7, 8, 9, 10});
    }
}
/// Move-construction of PODArray with inline (stack) storage: the new array
/// takes over the elements and the moved-from array is left empty.
TEST(Common, PODArrayBasicSwapMoveConstructor)
{
    static constexpr size_t initial_bytes = 32;
    using Array = PODArray<UInt64, initial_bytes,
        AllocatorWithStackMemory<Allocator<false>, initial_bytes>>;

    /// Append every value from `values` to `dst`.
    auto fill = [](Array & dst, std::initializer_list<UInt64> values)
    {
        for (UInt64 value : values)
            dst.push_back(value);
    };

    /// Assert that `arr` holds exactly `expected`, element by element.
    auto expect = [](const Array & arr, std::initializer_list<UInt64> expected)
    {
        ASSERT_TRUE(arr.size() == expected.size());
        size_t pos = 0;
        for (UInt64 value : expected)
        {
            ASSERT_TRUE(arr[pos] == value);
            ++pos;
        }
    };

    {
        /// Move-constructing from an empty array must be harmless.
        Array src;
        Array dst{std::move(src)};
    }
    {
        /// Short source: contents transferred, source emptied.
        Array src;
        fill(src, {1, 2, 3});
        Array dst{std::move(src)};
        ASSERT_TRUE(src.empty()); // NOLINT
        expect(dst, {1, 2, 3});
    }
    {
        /// Longer source (beyond inline capacity): same guarantees.
        Array src;
        fill(src, {1, 2, 3, 4, 5});
        Array dst{std::move(src)};
        ASSERT_TRUE(src.empty()); // NOLINT
        expect(dst, {1, 2, 3, 4, 5});
    }
}
TEST(Common, PODArrayInsert)
{
std::string str = "test_string_abacaba";

View File

@ -1,455 +0,0 @@
#include <Common/PODArray.h>
#include <common/types.h>
#include <iostream>
/// Minimal assertion for a standalone (non-gtest) test binary:
/// on failure it prints "file:line: Assertion <cond> failed." to stderr
/// and clears `res`, so the caller can report an aggregate verdict
/// without aborting the run.
#define ASSERT_CHECK(cond, res) \
do \
{ \
    if (!(cond)) \
    { \
        std::cerr << __FILE__ << ":" << __LINE__ << ":" \
            << "Assertion " << #cond << " failed.\n"; \
        if ((res)) { (res) = false; } \
    } \
} \
while (false)
static void test1()
{
using namespace DB;
static constexpr size_t initial_bytes = 32;
using Array = PODArray<UInt64, initial_bytes,
AllocatorWithStackMemory<Allocator<false>, initial_bytes>>;
bool res = true;
{
Array arr;
Array arr2;
arr2 = std::move(arr);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
Array arr2;
arr2 = std::move(arr);
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
arr = std::move(arr2);
ASSERT_CHECK((arr.size() == 3), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr[2] == 3), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
arr.push_back(4);
arr.push_back(5);
Array arr2;
arr2 = std::move(arr);
ASSERT_CHECK((arr2.size() == 5), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
ASSERT_CHECK((arr2[3] == 4), res);
ASSERT_CHECK((arr2[4] == 5), res);
arr = std::move(arr2);
ASSERT_CHECK((arr.size() == 5), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr[2] == 3), res);
ASSERT_CHECK((arr[3] == 4), res);
ASSERT_CHECK((arr[4] == 5), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
Array arr2;
arr2.push_back(4);
arr2.push_back(5);
arr2.push_back(6);
arr2.push_back(7);
arr2 = std::move(arr);
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
Array arr2;
arr2.push_back(4);
arr2.push_back(5);
arr2.push_back(6);
arr2.push_back(7);
arr2.push_back(8);
arr = std::move(arr2);
ASSERT_CHECK((arr.size() == 5), res);
ASSERT_CHECK((arr[0] == 4), res);
ASSERT_CHECK((arr[1] == 5), res);
ASSERT_CHECK((arr[2] == 6), res);
ASSERT_CHECK((arr[3] == 7), res);
ASSERT_CHECK((arr[4] == 8), res);
}
if (!res)
std::cerr << "Some errors were found in test 1\n";
}
static void test2()
{
using namespace DB;
static constexpr size_t initial_bytes = 32;
using Array = PODArray<UInt64, initial_bytes,
AllocatorWithStackMemory<Allocator<false>, initial_bytes>>;
bool res = true;
{
Array arr;
Array arr2;
arr.swap(arr2);
arr2.swap(arr);
}
{
Array arr;
Array arr2;
arr2.push_back(1);
arr2.push_back(2);
arr2.push_back(3);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 3), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr[2] == 3), res);
ASSERT_CHECK((arr2.empty()), res);
arr.swap(arr2);
ASSERT_CHECK((arr.empty()), res);
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
}
{
Array arr;
Array arr2;
arr2.push_back(1);
arr2.push_back(2);
arr2.push_back(3);
arr2.push_back(4);
arr2.push_back(5);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 5), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr[2] == 3), res);
ASSERT_CHECK((arr[3] == 4), res);
ASSERT_CHECK((arr[4] == 5), res);
ASSERT_CHECK((arr2.empty()), res);
arr.swap(arr2);
ASSERT_CHECK((arr.empty()), res);
ASSERT_CHECK((arr2.size() == 5), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
ASSERT_CHECK((arr2[3] == 4), res);
ASSERT_CHECK((arr2[4] == 5), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
Array arr2;
arr2.push_back(4);
arr2.push_back(5);
arr2.push_back(6);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 3), res);
ASSERT_CHECK((arr[0] == 4), res);
ASSERT_CHECK((arr[1] == 5), res);
ASSERT_CHECK((arr[2] == 6), res);
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 3), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr[2] == 3), res);
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 4), res);
ASSERT_CHECK((arr2[1] == 5), res);
ASSERT_CHECK((arr2[2] == 6), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
Array arr2;
arr2.push_back(3);
arr2.push_back(4);
arr2.push_back(5);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 3), res);
ASSERT_CHECK((arr[0] == 3), res);
ASSERT_CHECK((arr[1] == 4), res);
ASSERT_CHECK((arr[2] == 5), res);
ASSERT_CHECK((arr2.size() == 2), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 2), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 3), res);
ASSERT_CHECK((arr2[1] == 4), res);
ASSERT_CHECK((arr2[2] == 5), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
Array arr2;
arr2.push_back(4);
arr2.push_back(5);
arr2.push_back(6);
arr2.push_back(7);
arr2.push_back(8);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 5), res);
ASSERT_CHECK((arr[0] == 4), res);
ASSERT_CHECK((arr[1] == 5), res);
ASSERT_CHECK((arr[2] == 6), res);
ASSERT_CHECK((arr[3] == 7), res);
ASSERT_CHECK((arr[4] == 8), res);
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 3), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr[2] == 3), res);
ASSERT_CHECK((arr2.size() == 5), res);
ASSERT_CHECK((arr2[0] == 4), res);
ASSERT_CHECK((arr2[1] == 5), res);
ASSERT_CHECK((arr2[2] == 6), res);
ASSERT_CHECK((arr2[3] == 7), res);
ASSERT_CHECK((arr2[4] == 8), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
arr.push_back(4);
arr.push_back(5);
Array arr2;
arr2.push_back(6);
arr2.push_back(7);
arr2.push_back(8);
arr2.push_back(9);
arr2.push_back(10);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 5), res);
ASSERT_CHECK((arr[0] == 6), res);
ASSERT_CHECK((arr[1] == 7), res);
ASSERT_CHECK((arr[2] == 8), res);
ASSERT_CHECK((arr[3] == 9), res);
ASSERT_CHECK((arr[4] == 10), res);
ASSERT_CHECK((arr2.size() == 5), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
ASSERT_CHECK((arr2[3] == 4), res);
ASSERT_CHECK((arr2[4] == 5), res);
arr.swap(arr2);
ASSERT_CHECK((arr.size() == 5), res);
ASSERT_CHECK((arr[0] == 1), res);
ASSERT_CHECK((arr[1] == 2), res);
ASSERT_CHECK((arr[2] == 3), res);
ASSERT_CHECK((arr[3] == 4), res);
ASSERT_CHECK((arr[4] == 5), res);
ASSERT_CHECK((arr2.size() == 5), res);
ASSERT_CHECK((arr2[0] == 6), res);
ASSERT_CHECK((arr2[1] == 7), res);
ASSERT_CHECK((arr2[2] == 8), res);
ASSERT_CHECK((arr2[3] == 9), res);
ASSERT_CHECK((arr2[4] == 10), res);
}
if (!res)
std::cerr << "Some errors were found in test 2\n";
}
static void test3()
{
using namespace DB;
static constexpr size_t initial_bytes = 32;
using Array = PODArray<UInt64, initial_bytes,
AllocatorWithStackMemory<Allocator<false>, initial_bytes>>;
bool res = true;
{
Array arr;
Array arr2{std::move(arr)};
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
Array arr2{std::move(arr)};
ASSERT_CHECK((arr.empty()), res); // NOLINT
ASSERT_CHECK((arr2.size() == 3), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
}
{
Array arr;
arr.push_back(1);
arr.push_back(2);
arr.push_back(3);
arr.push_back(4);
arr.push_back(5);
Array arr2{std::move(arr)};
ASSERT_CHECK((arr.empty()), res); // NOLINT
ASSERT_CHECK((arr2.size() == 5), res);
ASSERT_CHECK((arr2[0] == 1), res);
ASSERT_CHECK((arr2[1] == 2), res);
ASSERT_CHECK((arr2[2] == 3), res);
ASSERT_CHECK((arr2[3] == 4), res);
ASSERT_CHECK((arr2[4] == 5), res);
}
if (!res)
std::cerr << "Some errors were found in test 3\n";
}
/// Runs the three PODArray smoke tests in order. Each test reports its own
/// failures to stderr via ASSERT_CHECK; the exit code is always 0, so the
/// stderr output is the only failure signal.
int main()
{
    std::cout << "test 1\n";
    test1();
    std::cout << "test 2\n";
    test2();
    std::cout << "test 3\n";
    test3();
    return 0;
}

View File

@ -47,6 +47,7 @@ SRCS(
FileChecker.cpp
IPv6ToBinary.cpp
IntervalKind.cpp
JSONBuilder.cpp
Macros.cpp
MemoryStatisticsOS.cpp
MemoryTracker.cpp

View File

@ -1,3 +1,3 @@
if(ENABLE_TESTS)
add_subdirectory(tests)
if(ENABLE_EXAMPLES)
add_subdirectory(examples)
endif()

View File

@ -1,3 +1,3 @@
if (ENABLE_TESTS)
add_subdirectory (tests)
if (ENABLE_EXAMPLES)
add_subdirectory(examples)
endif ()

View File

@ -435,7 +435,7 @@ class IColumn;
M(Bool, allow_experimental_map_type, false, "Allow data type Map", 0) \
M(Bool, allow_experimental_window_functions, false, "Allow experimental window functions", 0) \
M(Bool, use_antlr_parser, false, "Parse incoming queries using ANTLR-generated experimental parser", 0) \
M(Bool, async_socket_for_remote, false, "Asynchronously read from socket executing remote query", 0) \
M(Bool, async_socket_for_remote, true, "Asynchronously read from socket executing remote query", 0) \
M(Bool, insert_null_as_default, true, "Insert DEFAULT values instead of NULL in INSERT SELECT (UNION ALL)", 0) \
\
M(Bool, optimize_rewrite_sum_if_to_count_if, true, "Rewrite sumIf() and sum(if()) function countIf() function when logically equivalent", 0) \

View File

@ -1,6 +1,7 @@
#include <Core/SortDescription.h>
#include <Core/Block.h>
#include <IO/Operators.h>
#include <Common/JSONBuilder.h>
namespace DB
{
@ -37,6 +38,22 @@ void dumpSortDescription(const SortDescription & description, const Block & head
}
}
/// Serializes this sort column into a JSON map for EXPLAIN output.
/// Prefers the explicit column name; otherwise resolves the name by
/// position in `header` (when the position is in range) and always
/// emits the raw position as well.
void SortColumnDescription::explain(JSONBuilder::JSONMap & map, const Block & header) const
{
    if (!column_name.empty())
        map.add("Column", column_name);
    else
    {
        /// Guard: the description may reference a position outside `header`.
        if (column_number < header.columns())
            map.add("Column", header.getByPosition(column_number).name);
        map.add("Position", column_number);
    }
    map.add("Ascending", direction > 0);
    map.add("With Fill", with_fill);
}
std::string dumpSortDescription(const SortDescription & description)
{
WriteBufferFromOwnString wb;
@ -44,5 +61,17 @@ std::string dumpSortDescription(const SortDescription & description)
return wb.str();
}
/// Builds a JSON array with one map per sort column, for the JSON form of
/// EXPLAIN output; each entry is produced by SortColumnDescription::explain.
JSONBuilder::ItemPtr explainSortDescription(const SortDescription & description, const Block & header)
{
    auto result = std::make_unique<JSONBuilder::JSONArray>();
    for (const auto & sort_column : description)
    {
        auto column_map = std::make_unique<JSONBuilder::JSONMap>();
        sort_column.explain(*column_map, header);
        result->add(std::move(column_map));
    }
    return result;
}
}

View File

@ -12,6 +12,15 @@ class Collator;
namespace DB
{
namespace JSONBuilder
{
class JSONMap;
class IItem;
using ItemPtr = std::unique_ptr<IItem>;
}
class Block;
struct FillColumnDescription
{
/// All missed values in range [FROM, TO) will be filled
@ -62,16 +71,18 @@ struct SortColumnDescription
{
return fmt::format("{}:{}:dir {}nulls ", column_name, column_number, direction, nulls_direction);
}
void explain(JSONBuilder::JSONMap & map, const Block & header) const;
};
/// Description of the sorting rule for several columns.
using SortDescription = std::vector<SortColumnDescription>;
class Block;
/// Outputs user-readable description into `out`.
void dumpSortDescription(const SortDescription & description, const Block & header, WriteBuffer & out);
std::string dumpSortDescription(const SortDescription & description);
JSONBuilder::ItemPtr explainSortDescription(const SortDescription & description, const Block & header);
}

View File

@ -1,3 +1,3 @@
if (ENABLE_TESTS)
add_subdirectory (tests)
if (ENABLE_EXAMPLES)
add_subdirectory(examples)
endif ()

Some files were not shown because too many files have changed in this diff Show More