Merge remote-tracking branch 'origin/master' into distinct_sorted_simplify

Igor Nikonov 2022-08-02 11:18:46 +00:00
commit 56697125d3
719 changed files with 2391 additions and 2783 deletions

View File

@ -151,8 +151,8 @@ jobs:
# shellcheck disable=SC2046
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr "$TEMP_PATH"
SplitBuildSmokeTest:
needs: [BuilderDebSplitted]
SharedBuildSmokeTest:
needs: [BuilderDebShared]
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
@ -171,7 +171,7 @@ jobs:
uses: actions/download-artifact@v2
with:
path: ${{ env.REPORTS_PATH }}
- name: Split build check
- name: Shared build check
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
@ -598,7 +598,7 @@ jobs:
##########################################################################################
##################################### SPECIAL BUILDS #####################################
##########################################################################################
BuilderDebSplitted:
BuilderDebShared:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
@ -609,7 +609,7 @@ jobs:
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=binary_splitted
BUILD_NAME=binary_shared
EOF
- name: Download changed images
uses: actions/download-artifact@v2
@ -1012,7 +1012,7 @@ jobs:
# - BuilderBinGCC
- BuilderBinPPC64
- BuilderBinClangTidy
- BuilderDebSplitted
- BuilderDebShared
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
@ -3153,7 +3153,7 @@ jobs:
- UnitTestsMsan
- UnitTestsUBsan
- UnitTestsReleaseClang
- SplitBuildSmokeTest
- SharedBuildSmokeTest
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository

View File

@ -216,8 +216,8 @@ jobs:
# shellcheck disable=SC2046
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr "$TEMP_PATH"
SplitBuildSmokeTest:
needs: [BuilderDebSplitted]
SharedBuildSmokeTest:
needs: [BuilderDebShared]
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
@ -236,7 +236,7 @@ jobs:
uses: actions/download-artifact@v2
with:
path: ${{ env.REPORTS_PATH }}
- name: Split build check
- name: Shared build check
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
@ -620,7 +620,7 @@ jobs:
##########################################################################################
##################################### SPECIAL BUILDS #####################################
##########################################################################################
BuilderDebSplitted:
BuilderDebShared:
needs: [DockerHubPush, FastTest, StyleCheck]
runs-on: [self-hosted, builder]
steps:
@ -631,7 +631,7 @@ jobs:
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=binary_splitted
BUILD_NAME=binary_shared
EOF
- name: Download changed images
uses: actions/download-artifact@v2
@ -1024,7 +1024,7 @@ jobs:
# - BuilderBinGCC
- BuilderBinPPC64
- BuilderBinClangTidy
- BuilderDebSplitted
- BuilderDebShared
runs-on: [self-hosted, style-checker]
if: ${{ success() || failure() }}
steps:
@ -3416,7 +3416,7 @@ jobs:
- UnitTestsMsan
- UnitTestsUBsan
- UnitTestsReleaseClang
- SplitBuildSmokeTest
- SharedBuildSmokeTest
- CompatibilityCheck
- IntegrationTestsFlakyCheck
- Jepsen

View File

@ -77,10 +77,9 @@ option(USE_STATIC_LIBRARIES "Disable to use shared libraries" ON)
# DEVELOPER ONLY.
# Faster linking if turned on.
option(SPLIT_SHARED_LIBRARIES "Keep all internal libraries as separate .so files" OFF)
option(CLICKHOUSE_SPLIT_BINARY "Make several binaries (clickhouse-server, clickhouse-client etc.) instead of one bundled" OFF)
if (USE_STATIC_LIBRARIES AND (SPLIT_SHARED_LIBRARIES OR CLICKHOUSE_SPLIT_BINARY))
message(FATAL_ERROR "SPLIT_SHARED_LIBRARIES=1 or CLICKHOUSE_SPLIT_BINARY=1 must not be used together with USE_STATIC_LIBRARIES=1")
if (USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
message(FATAL_ERROR "SPLIT_SHARED_LIBRARIES=1 must not be used together with USE_STATIC_LIBRARIES=1")
endif()
if (NOT USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
@ -502,7 +501,7 @@ endif ()
message (STATUS
"Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ;
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}")
SPLIT_SHARED_LIBRARIES=${SPLIT_SHARED_LIBRARIES}")
include (GNUInstallDirs)

View File

@ -15,6 +15,4 @@ ClickHouse® is an open-source column-oriented database management system that a
* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.
## Upcoming events
* **v22.8 Release Webinar** Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.
* [**v22.8 Release Webinar**](https://clickhouse.com/company/events/v22-8-release-webinar) Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.

View File

@ -100,12 +100,12 @@ def run_docker_image_with_env(
subprocess.check_call(cmd, shell=True)
def is_release_build(build_type, package_type, sanitizer, split_binary):
def is_release_build(build_type, package_type, sanitizer, shared_libraries):
return (
build_type == ""
and package_type == "deb"
and sanitizer == ""
and not split_binary
and not shared_libraries
)
@ -116,7 +116,7 @@ def parse_env_variables(
package_type,
cache,
distcc_hosts,
split_binary,
shared_libraries,
clang_tidy,
version,
author,
@ -202,7 +202,7 @@ def parse_env_variables(
cmake_flags.append("-DCMAKE_INSTALL_PREFIX=/usr")
cmake_flags.append("-DCMAKE_INSTALL_SYSCONFDIR=/etc")
cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var")
if is_release_build(build_type, package_type, sanitizer, split_binary):
if is_release_build(build_type, package_type, sanitizer, shared_libraries):
cmake_flags.append("-DSPLIT_DEBUG_SYMBOLS=ON")
result.append("WITH_PERFORMANCE=1")
if is_cross_arm:
@ -215,11 +215,11 @@ def parse_env_variables(
cmake_flags.append(f"-DCMAKE_C_COMPILER={cc}")
cmake_flags.append(f"-DCMAKE_CXX_COMPILER={cxx}")
# Create combined output archive for split build and for performance tests.
# Create combined output archive for shared library build and for performance tests.
if package_type == "coverity":
result.append("COMBINED_OUTPUT=coverity")
result.append('COVERITY_TOKEN="$COVERITY_TOKEN"')
elif split_binary:
elif shared_libraries:
result.append("COMBINED_OUTPUT=shared_build")
if sanitizer:
@ -264,14 +264,13 @@ def parse_env_variables(
result.append("BINARY_OUTPUT=tests")
cmake_flags.append("-DENABLE_TESTS=1")
if split_binary:
if shared_libraries:
cmake_flags.append(
"-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 "
"-DCLICKHOUSE_SPLIT_BINARY=1"
"-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1"
)
# We can't always build utils because it requires too much space, but
# we have to build them at least in some way in CI. The split build is
# probably the least heavy disk-wise.
# we have to build them at least in some way in CI. The shared library
# build is probably the least heavy disk-wise.
cmake_flags.append("-DENABLE_UTILS=1")
# utils are not included into clickhouse-bundle, so build everything
build_target = "all"
@ -352,7 +351,7 @@ if __name__ == "__main__":
default="",
)
parser.add_argument("--split-binary", action="store_true")
parser.add_argument("--shared-libraries", action="store_true")
parser.add_argument("--clang-tidy", action="store_true")
parser.add_argument("--cache", choices=("ccache", "distcc", ""), default="")
parser.add_argument(
@ -405,7 +404,7 @@ if __name__ == "__main__":
args.package_type,
args.cache,
args.distcc_hosts,
args.split_binary,
args.shared_libraries,
args.clang_tidy,
args.version,
args.author,

View File

@ -38,6 +38,7 @@ FORMAT_SCHEMA_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_
# There could be many disks declared in config
readarray -t FILESYSTEM_CACHE_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.data_cache_path' || true)
readarray -t DISKS_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.path' || true)
CLICKHOUSE_USER="${CLICKHOUSE_USER:-default}"
CLICKHOUSE_PASSWORD="${CLICKHOUSE_PASSWORD:-}"
@ -50,7 +51,8 @@ for dir in "$DATA_DIR" \
"$TMP_DIR" \
"$USER_PATH" \
"$FORMAT_SCHEMA_PATH" \
"${FILESYSTEM_CACHE_PATHS[@]}"
"${FILESYSTEM_CACHE_PATHS[@]}" \
"${DISKS_PATHS[@]}"
do
# check if variable not empty
[ -z "$dir" ] && continue

View File

@ -171,11 +171,11 @@ concurrency-related errors. If it fails:
## Split Build Smoke Test
Checks that the server build in [split build](../development/build.md#split-build)
Checks that the server build in [split build](../development/developer-instruction.md#split-build)
configuration can start and run simple queries. If it fails:
* Fix other test errors first;
* Build the server in [split build](../development/build.md#split-build) configuration
* Build the server in [split build](../development/developer-instruction.md#split-build) configuration
locally and check whether it can start and run `select 1`.

View File

@ -267,19 +267,19 @@ The system will prepare ClickHouse binary builds for your pull request individua
Most probably some of the builds will fail at first times. This is due to the fact that we check builds both with gcc as well as with clang, with almost all of existing warnings (always with the `-Werror` flag) enabled for clang. On that same page, you can find all of the build logs so that you do not have to build ClickHouse in all of the possible ways.
## Browse ClickHouse Source Code {#browse-clickhouse-source-code}
You can use the **Woboq** online code browser available [here](https://clickhouse.com/codebrowser/ClickHouse/src/index.html). It provides code navigation, semantic highlighting, search and indexing. The code snapshot is updated daily.
Also, you can browse sources on [GitHub](https://github.com/ClickHouse/ClickHouse) as usual.
## Faster builds for development: Split build configuration {#split-build}
ClickHouse is normally statically linked into a single static `clickhouse` binary with minimal dependencies. This is convenient for distribution, but it means that for every change the entire binary needs to be re-linked, which is slow and inconvenient for development. As an alternative, you can instead build dynamically linked shared libraries and separate binaries `clickhouse-server`, `clickhouse-client` etc., allowing for faster incremental builds. To use it, add the following flags to your `cmake` invocation:
ClickHouse is normally statically linked into a single static `clickhouse` binary with minimal dependencies. This is convenient for distribution, but it means that for every change the entire binary needs to be re-linked, which is slow and inconvenient for development. As an alternative, you can instead build dynamically linked shared libraries, allowing for faster incremental builds. To use it, add the following flags to your `cmake` invocation:
```
-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1
-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1
```
Note that the split build has several drawbacks:
* There is no single `clickhouse` binary, and you have to run `clickhouse-server`, `clickhouse-client`, etc.
* Risk of segfault if you run any of the programs while rebuilding the project.
* You cannot run the integration tests since they only work with a single complete binary.
* You can't easily copy the binaries elsewhere. Instead of moving a single binary you'll need to copy all binaries and libraries.
If you are not interested in functionality provided by third-party libraries, you can further speed up the build using `cmake` options
```
-DENABLE_LIBRARIES=0 -DENABLE_EMBEDDED_COMPILER=0

View File

@ -389,12 +389,6 @@ SETTINGS mutations_sync = 1;
Let's run the same 3 queries.
[Enable](../../operations/settings/settings.md#allow-experimental-projection-optimization) projections for selects:
```sql
SET allow_experimental_projection_optimization = 1;
```
### Query 1. Average Price Per Year {#average-price-projections}
Query:

View File

@ -438,6 +438,18 @@ For more information, see the section “[Configuration files](../../operations/
<include_from>/etc/metrica.xml</include_from>
```
## interserver_listen_host {#interserver-listen-host}
Restriction on hosts that can exchange data between ClickHouse servers.
The default value is the same as the `listen_host` setting.
Examples:
``` xml
<interserver_listen_host>::ffff:a00:1</interserver_listen_host>
<interserver_listen_host>10.0.0.1</interserver_listen_host>
```
## interserver_http_port {#interserver-http-port}
Port for exchanging data between ClickHouse servers.
@ -970,7 +982,7 @@ Default value: 2.
**Example**
```xml
<background_merges_mutations_concurrency_ratio>3</background_pbackground_merges_mutations_concurrency_ratio>
<background_merges_mutations_concurrency_ratio>3</background_merges_mutations_concurrency_ratio>
```
## background_move_pool_size {#background_move_pool_size}

View File

@ -12,12 +12,13 @@ Reads file as a String. The file content is not parsed, so any information is re
**Syntax**
``` sql
file(path)
file(path[, default])
```
**Arguments**
- `path` — The relative path to the file from [user_files_path](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-user_files_path). Path to file support following wildcards: `*`, `?`, `{abc,def}` and `{N..M}` where `N`, `M` — numbers, `'abc', 'def'` — strings.
- `default` — The value that will be returned in the case when a file does not exist or cannot be accessed. Data types supported: [String](../../sql-reference/data-types/string.md) and [NULL](../../sql-reference/syntax.md#null-literal).
**Example**

View File

@ -170,7 +170,7 @@ sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
If your development machine has an old HDD or SSD, or if you want to use less disk space for build artifacts, you can use the following command:
```bash
cmake -DUSE_DEBUG_HELPERS=1 -DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1 ..
cmake -DUSE_DEBUG_HELPERS=1 -DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 ..
```
Keep in mind that the resulting executables will be dynamically linked against shared libraries and therefore effectively non-portable to other machines (or porting them will take considerably more effort than with a static build). The upside is a much shorter build time (not on the first build, but on subsequent builds after source changes, since less time is spent on linking than with a static build) and much lower disk usage (more than 3x savings compared to a static build). For development purposes, when only debug runs on the same machine where the build was made are planned, this can be the most convenient option.
@ -285,3 +285,9 @@ Pull request можно создать, даже если работа над з
The system will prepare ClickHouse builds specifically for your pull request. To get them, click the «Details» link next to the «Clickhouse build check» check. There you will find direct links to the built ClickHouse .deb packages, which, if you wish, you can even install on your production servers (if that does not scare you).
Most likely some of the builds will fail on the first try. This is because we check the build with both gcc and clang, and the clang build enables almost every warning in existence (always with the `-Werror` flag). On the same page you can find the build logs, so you do not have to build ClickHouse yourself in every possible way.
## Browsing the ClickHouse Source Code {#navigatsiia-po-kodu-clickhouse}
For online code navigation you can use **Woboq**, available [here](https://clickhouse.com/codebrowser/ClickHouse/src/index.html). It provides convenient navigation between source files, semantic highlighting, hints, indexing, and search. The code snapshot is updated daily.
You can also browse the sources on [GitHub](https://github.com/ClickHouse/ClickHouse).

View File

@ -389,12 +389,6 @@ SETTINGS mutations_sync = 1;
Let's run the same 3 queries.
[Enable](../../operations/settings/settings.md#allow-experimental-projection-optimization) projection support:
```sql
SET allow_experimental_projection_optimization = 1;
```
### Query 1. Average Price Per Year {#average-price-projections}
Query:
@ -647,4 +641,3 @@ no projection: 100 rows in set. Elapsed: 0.069 sec. Processed 26.32 million rows
### Online Playground {#playground}
This dataset is available in the [Online Playground](https://gh-api.clickhouse.tech/play?user=play#U0VMRUNUIHRvd24sIGRpc3RyaWN0LCBjb3VudCgpIEFTIGMsIHJvdW5kKGF2ZyhwcmljZSkpIEFTIHByaWNlLCBiYXIocHJpY2UsIDAsIDUwMDAwMDAsIDEwMCkgRlJPTSB1a19wcmljZV9wYWlkIFdIRVJFIGRhdGUgPj0gJzIwMjAtMDEtMDEnIEdST1VQIEJZIHRvd24sIGRpc3RyaWN0IEhBVklORyBjID49IDEwMCBPUkRFUiBCWSBwcmljZSBERVNDIExJTUlUIDEwMA==).

View File

@ -407,6 +407,18 @@ ClickHouse проверяет условия для `min_part_size` и `min_part
<include_from>/etc/metrica.xml</include_from>
```
## interserver_listen_host {#interserver-listen-host}
Restriction on the hosts that can be used for data exchange between ClickHouse servers.
The default value is the same as the `listen_host` setting.
Examples:
``` xml
<interserver_listen_host>::ffff:a00:1</interserver_listen_host>
<interserver_listen_host>10.0.0.1</interserver_listen_host>
```
## interserver_http_port {#interserver-http-port}
Port for exchanging data between ClickHouse servers.

View File

@ -12,12 +12,13 @@ sidebar_label: "Функции для работы с файлами"
**Syntax**
``` sql
file(path)
file(path[, default])
```
**Arguments**
- `path` — The relative path to the file from [user_files_path](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-user_files_path). The path may contain the following wildcards: `*`, `?`, `{abc,def}` and `{N..M}`, where `N`, `M` are numbers and `'abc', 'def'` are strings.
- `default` — The value returned if the specified file does not exist. Supported data types: [String](../../sql-reference/data-types/string.md) and [NULL](../../sql-reference/syntax.md#null-literal).
**Examples**

View File

@ -264,3 +264,9 @@ ClickHouse成员一旦在您的拉取请求上贴上«可以测试»标签
The system will prepare ClickHouse binary builds for your pull request individually. To retrieve them, click the «Details» link next to the «ClickHouse build check» entry in the list of checks. There you will find direct links to the built ClickHouse .deb packages, which you can even deploy on your production servers (if you are not worried about that).
Some of the builds will most likely fail on the first try. This is because we check builds with both gcc and clang, and almost all existing warnings are enabled for clang (always with the `-Werror` flag). On the same page you can find all the build logs, so there is no need to build ClickHouse in every possible way yourself.
## Browse ClickHouse Source Code {#browse-clickhouse-source-code}
You can use the **Woboq** online code browser, available [here](https://clickhouse.com/codebrowser/ClickHouse/src/index.html). It provides code navigation, semantic highlighting, search, and indexing. The code snapshot is updated daily.
You can also browse the source code on [GitHub](https://github.com/ClickHouse/ClickHouse) as usual.

View File

@ -18,11 +18,7 @@ option (ENABLE_CLICKHOUSE_SERVER "Server mode (main mode)" ${ENABLE_CLICKHOUSE_A
option (ENABLE_CLICKHOUSE_CLIENT "Client mode (interactive tui/shell that connects to the server)"
${ENABLE_CLICKHOUSE_ALL})
if (CLICKHOUSE_SPLIT_BINARY)
option (ENABLE_CLICKHOUSE_SELF_EXTRACTING "Self-extracting executable" OFF)
else ()
option (ENABLE_CLICKHOUSE_SELF_EXTRACTING "Self-extracting executable" ON)
endif ()
option (ENABLE_CLICKHOUSE_SELF_EXTRACTING "Self-extracting executable" ON)
# https://clickhouse.com/docs/en/operations/utilities/clickhouse-local/
option (ENABLE_CLICKHOUSE_LOCAL "Local files fast processing mode" ${ENABLE_CLICKHOUSE_ALL})
@ -80,12 +76,7 @@ if (NOT ENABLE_NURAFT)
set(ENABLE_CLICKHOUSE_KEEPER_CONVERTER OFF)
endif()
if (CLICKHOUSE_SPLIT_BINARY)
option(ENABLE_CLICKHOUSE_INSTALL "Install ClickHouse without .deb/.rpm/.tgz packages (having the binary only)" OFF)
else ()
option(ENABLE_CLICKHOUSE_INSTALL "Install ClickHouse without .deb/.rpm/.tgz packages (having the binary only)"
${ENABLE_CLICKHOUSE_ALL})
endif ()
option(ENABLE_CLICKHOUSE_INSTALL "Install ClickHouse without .deb/.rpm/.tgz packages (having the binary only)" ${ENABLE_CLICKHOUSE_ALL})
message(STATUS "ClickHouse modes:")
@ -211,10 +202,6 @@ macro(clickhouse_target_link_split_lib target name)
endif()
endmacro()
macro(clickhouse_program_link_split_binary name)
clickhouse_target_link_split_lib(clickhouse-${name} ${name})
endmacro()
macro(clickhouse_program_add_library name)
string(TOUPPER ${name} name_uc)
string(REPLACE "-" "_" name_uc ${name_uc})
@ -239,17 +226,8 @@ macro(clickhouse_program_add_library name)
endif()
endmacro()
macro(clickhouse_program_add_executable name)
if(CLICKHOUSE_SPLIT_BINARY)
clickhouse_add_executable(clickhouse-${name} clickhouse-${name}.cpp)
clickhouse_program_link_split_binary(${name})
install(TARGETS clickhouse-${name} ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
endif()
endmacro()
macro(clickhouse_program_add name)
clickhouse_program_add_library(${name})
clickhouse_program_add_executable(${name})
endmacro()
add_subdirectory (server)
@ -342,210 +320,173 @@ if (CLICKHOUSE_ONE_SHARED)
install (TARGETS clickhouse-lib LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT clickhouse)
endif()
if (CLICKHOUSE_SPLIT_BINARY)
set (CLICKHOUSE_ALL_TARGETS
clickhouse-server
clickhouse-client
clickhouse-local
clickhouse-benchmark
clickhouse-extract-from-config
clickhouse-compressor
clickhouse-format
clickhouse-obfuscator
clickhouse-git-import
clickhouse-copier
clickhouse-static-files-disk-uploader
clickhouse-disks)
clickhouse_add_executable (clickhouse main.cpp)
if (ENABLE_CLICKHOUSE_ODBC_BRIDGE)
list (APPEND CLICKHOUSE_ALL_TARGETS clickhouse-odbc-bridge)
endif ()
if (NOT USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
# Shared split (dev) build: In CI, the server is run with custom LD_LIBRARY_PATH. This makes the harmful env check re-execute the
# process in a clean environment but as in CI the containing directory is not included in DT_RUNPATH/DT_RPATH, the server won't come up.
target_compile_definitions(clickhouse PRIVATE DISABLE_HARMFUL_ENV_VAR_CHECK)
endif ()
if (ENABLE_CLICKHOUSE_LIBRARY_BRIDGE)
list (APPEND CLICKHOUSE_ALL_TARGETS clickhouse-library-bridge)
endif ()
# A library that prevent usage of several functions from libc.
if (ARCH_AMD64 AND OS_LINUX AND NOT OS_ANDROID)
set (HARMFUL_LIB harmful)
endif ()
if (ENABLE_CLICKHOUSE_KEEPER)
list (APPEND CLICKHOUSE_ALL_TARGETS clickhouse-keeper)
endif ()
target_link_libraries (clickhouse PRIVATE clickhouse_common_io string_utils ${HARMFUL_LIB})
target_include_directories (clickhouse PRIVATE ${CMAKE_CURRENT_BINARY_DIR})
if (ENABLE_CLICKHOUSE_KEEPER_CONVERTER)
list (APPEND CLICKHOUSE_ALL_TARGETS clickhouse-keeper-converter)
endif ()
if (ENABLE_CLICKHOUSE_SERVER)
clickhouse_target_link_split_lib(clickhouse server)
endif ()
if (ENABLE_CLICKHOUSE_CLIENT)
clickhouse_target_link_split_lib(clickhouse client)
endif ()
if (ENABLE_CLICKHOUSE_LOCAL)
clickhouse_target_link_split_lib(clickhouse local)
endif ()
if (ENABLE_CLICKHOUSE_BENCHMARK)
clickhouse_target_link_split_lib(clickhouse benchmark)
endif ()
if (ENABLE_CLICKHOUSE_COPIER)
clickhouse_target_link_split_lib(clickhouse copier)
endif ()
if (ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG)
clickhouse_target_link_split_lib(clickhouse extract-from-config)
endif ()
if (ENABLE_CLICKHOUSE_COMPRESSOR)
clickhouse_target_link_split_lib(clickhouse compressor)
endif ()
if (ENABLE_CLICKHOUSE_FORMAT)
clickhouse_target_link_split_lib(clickhouse format)
endif ()
if (ENABLE_CLICKHOUSE_OBFUSCATOR)
clickhouse_target_link_split_lib(clickhouse obfuscator)
endif ()
if (ENABLE_CLICKHOUSE_GIT_IMPORT)
clickhouse_target_link_split_lib(clickhouse git-import)
endif ()
if (ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER)
clickhouse_target_link_split_lib(clickhouse static-files-disk-uploader)
endif ()
if (ENABLE_CLICKHOUSE_SU)
clickhouse_target_link_split_lib(clickhouse su)
endif ()
if (ENABLE_CLICKHOUSE_KEEPER)
clickhouse_target_link_split_lib(clickhouse keeper)
endif()
if (ENABLE_CLICKHOUSE_KEEPER_CONVERTER)
clickhouse_target_link_split_lib(clickhouse keeper-converter)
endif()
if (ENABLE_CLICKHOUSE_INSTALL)
clickhouse_target_link_split_lib(clickhouse install)
endif ()
if (ENABLE_CLICKHOUSE_DISKS)
clickhouse_target_link_split_lib(clickhouse disks)
endif ()
if (ENABLE_CLICKHOUSE_SU)
list (APPEND CLICKHOUSE_ALL_TARGETS clickhouse-su)
endif ()
set (CLICKHOUSE_BUNDLE)
if (ENABLE_CLICKHOUSE_SELF_EXTRACTING)
list(APPEND CLICKHOUSE_BUNDLE self-extracting)
endif ()
if (ENABLE_CLICKHOUSE_SERVER)
add_custom_target (clickhouse-server ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-server DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-server" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-server)
endif ()
if (ENABLE_CLICKHOUSE_CLIENT)
add_custom_target (clickhouse-client ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-client DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-client" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-client)
endif ()
if (ENABLE_CLICKHOUSE_LOCAL)
add_custom_target (clickhouse-local ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-local DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-local" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-local)
endif ()
if (ENABLE_CLICKHOUSE_BENCHMARK)
add_custom_target (clickhouse-benchmark ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-benchmark DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-benchmark" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-benchmark)
endif ()
if (ENABLE_CLICKHOUSE_COPIER)
add_custom_target (clickhouse-copier ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-copier DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-copier" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-copier)
endif ()
if (ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG)
add_custom_target (clickhouse-extract-from-config ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-extract-from-config DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-extract-from-config" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-extract-from-config)
endif ()
if (ENABLE_CLICKHOUSE_COMPRESSOR)
add_custom_target (clickhouse-compressor ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-compressor DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-compressor" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-compressor)
endif ()
if (ENABLE_CLICKHOUSE_FORMAT)
add_custom_target (clickhouse-format ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-format DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-format" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-format)
endif ()
if (ENABLE_CLICKHOUSE_OBFUSCATOR)
add_custom_target (clickhouse-obfuscator ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-obfuscator DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-obfuscator" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-obfuscator)
endif ()
if (ENABLE_CLICKHOUSE_GIT_IMPORT)
add_custom_target (clickhouse-git-import ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-git-import DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-git-import" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-git-import)
endif ()
if (ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER)
add_custom_target (clickhouse-static-files-disk-uploader ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-static-files-disk-uploader DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-static-files-disk-uploader" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-static-files-disk-uploader)
endif ()
if (ENABLE_CLICKHOUSE_SU)
add_custom_target (clickhouse-su ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-su DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-su" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-su)
endif ()
set_target_properties(${CLICKHOUSE_ALL_TARGETS} PROPERTIES RUNTIME_OUTPUT_DIRECTORY ..)
add_custom_target (clickhouse-bundle ALL DEPENDS ${CLICKHOUSE_ALL_TARGETS})
add_custom_target (clickhouse ALL DEPENDS clickhouse-bundle)
install(PROGRAMS clickhouse-split-helper DESTINATION ${CMAKE_INSTALL_BINDIR} RENAME clickhouse COMPONENT clickhouse)
else ()
clickhouse_add_executable (clickhouse main.cpp)
# A library that prevent usage of several functions from libc.
if (ARCH_AMD64 AND OS_LINUX AND NOT OS_ANDROID)
set (HARMFUL_LIB harmful)
endif ()
target_link_libraries (clickhouse PRIVATE clickhouse_common_io string_utils ${HARMFUL_LIB})
target_include_directories (clickhouse PRIVATE ${CMAKE_CURRENT_BINARY_DIR})
if (ENABLE_CLICKHOUSE_SERVER)
clickhouse_target_link_split_lib(clickhouse server)
endif ()
if (ENABLE_CLICKHOUSE_CLIENT)
clickhouse_target_link_split_lib(clickhouse client)
endif ()
if (ENABLE_CLICKHOUSE_LOCAL)
clickhouse_target_link_split_lib(clickhouse local)
endif ()
if (ENABLE_CLICKHOUSE_BENCHMARK)
clickhouse_target_link_split_lib(clickhouse benchmark)
endif ()
if (ENABLE_CLICKHOUSE_COPIER)
clickhouse_target_link_split_lib(clickhouse copier)
endif ()
if (ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG)
clickhouse_target_link_split_lib(clickhouse extract-from-config)
endif ()
if (ENABLE_CLICKHOUSE_COMPRESSOR)
clickhouse_target_link_split_lib(clickhouse compressor)
endif ()
if (ENABLE_CLICKHOUSE_FORMAT)
clickhouse_target_link_split_lib(clickhouse format)
endif ()
if (ENABLE_CLICKHOUSE_OBFUSCATOR)
clickhouse_target_link_split_lib(clickhouse obfuscator)
endif ()
if (ENABLE_CLICKHOUSE_GIT_IMPORT)
clickhouse_target_link_split_lib(clickhouse git-import)
endif ()
if (ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER)
clickhouse_target_link_split_lib(clickhouse static-files-disk-uploader)
endif ()
if (ENABLE_CLICKHOUSE_SU)
clickhouse_target_link_split_lib(clickhouse su)
endif ()
if (ENABLE_CLICKHOUSE_KEEPER)
clickhouse_target_link_split_lib(clickhouse keeper)
endif()
if (ENABLE_CLICKHOUSE_KEEPER_CONVERTER)
clickhouse_target_link_split_lib(clickhouse keeper-converter)
endif()
if (ENABLE_CLICKHOUSE_INSTALL)
clickhouse_target_link_split_lib(clickhouse install)
endif ()
if (ENABLE_CLICKHOUSE_DISKS)
clickhouse_target_link_split_lib(clickhouse disks)
endif ()
set (CLICKHOUSE_BUNDLE)
if (ENABLE_CLICKHOUSE_SELF_EXTRACTING)
list(APPEND CLICKHOUSE_BUNDLE self-extracting)
endif ()
if (ENABLE_CLICKHOUSE_SERVER)
add_custom_target (clickhouse-server ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-server DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-server" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-server)
endif ()
if (ENABLE_CLICKHOUSE_CLIENT)
add_custom_target (clickhouse-client ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-client DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-client" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-client)
endif ()
if (ENABLE_CLICKHOUSE_LOCAL)
add_custom_target (clickhouse-local ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-local DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-local" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-local)
endif ()
if (ENABLE_CLICKHOUSE_BENCHMARK)
add_custom_target (clickhouse-benchmark ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-benchmark DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-benchmark" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-benchmark)
endif ()
if (ENABLE_CLICKHOUSE_COPIER)
add_custom_target (clickhouse-copier ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-copier DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-copier" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-copier)
endif ()
if (ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG)
add_custom_target (clickhouse-extract-from-config ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-extract-from-config DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-extract-from-config" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-extract-from-config)
endif ()
if (ENABLE_CLICKHOUSE_COMPRESSOR)
add_custom_target (clickhouse-compressor ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-compressor DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-compressor" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-compressor)
endif ()
if (ENABLE_CLICKHOUSE_FORMAT)
add_custom_target (clickhouse-format ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-format DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-format" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-format)
endif ()
if (ENABLE_CLICKHOUSE_OBFUSCATOR)
add_custom_target (clickhouse-obfuscator ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-obfuscator DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-obfuscator" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-obfuscator)
endif ()
if (ENABLE_CLICKHOUSE_GIT_IMPORT)
add_custom_target (clickhouse-git-import ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-git-import DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-git-import" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-git-import)
endif ()
if (ENABLE_CLICKHOUSE_STATIC_FILES_DISK_UPLOADER)
add_custom_target (clickhouse-static-files-disk-uploader ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-static-files-disk-uploader DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-static-files-disk-uploader" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-static-files-disk-uploader)
endif ()
if (ENABLE_CLICKHOUSE_SU)
add_custom_target (clickhouse-su ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-su DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-su" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-su)
endif ()
if (ENABLE_CLICKHOUSE_KEEPER)
if (NOT BUILD_STANDALONE_KEEPER AND CREATE_KEEPER_SYMLINK)
add_custom_target (clickhouse-keeper ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-keeper DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-keeper" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
endif()
# otherwise we don't build keeper
if (BUILD_STANDALONE_KEEPER OR CREATE_KEEPER_SYMLINK)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-keeper)
endif()
endif ()
if (ENABLE_CLICKHOUSE_KEEPER_CONVERTER)
add_custom_target (clickhouse-keeper-converter ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-keeper-converter DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-keeper-converter" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-keeper-converter)
endif ()
if (ENABLE_CLICKHOUSE_DISKS)
add_custom_target (clickhouse-disks ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-disks DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-disks" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-disks)
endif ()
add_custom_target (clickhouse-bundle ALL DEPENDS ${CLICKHOUSE_BUNDLE})
if (USE_GDB_ADD_INDEX)
add_custom_command(TARGET clickhouse POST_BUILD COMMAND ${GDB_ADD_INDEX_EXE} clickhouse COMMENT "Adding .gdb-index to clickhouse" VERBATIM)
if (ENABLE_CLICKHOUSE_KEEPER)
if (NOT BUILD_STANDALONE_KEEPER AND CREATE_KEEPER_SYMLINK)
add_custom_target (clickhouse-keeper ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-keeper DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-keeper" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
endif()
if (USE_BINARY_HASH)
add_custom_command(TARGET clickhouse POST_BUILD COMMAND ./clickhouse hash-binary > hash && ${OBJCOPY_PATH} --add-section .clickhouse.hash=hash clickhouse COMMENT "Adding section '.clickhouse.hash' to clickhouse binary" VERBATIM)
# otherwise we don't build keeper
if (BUILD_STANDALONE_KEEPER OR CREATE_KEEPER_SYMLINK)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-keeper)
endif()
endif ()
if (ENABLE_CLICKHOUSE_KEEPER_CONVERTER)
add_custom_target (clickhouse-keeper-converter ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-keeper-converter DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-keeper-converter" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-keeper-converter)
endif ()
if (ENABLE_CLICKHOUSE_DISKS)
add_custom_target (clickhouse-disks ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-disks DEPENDS clickhouse)
install (FILES "${CMAKE_CURRENT_BINARY_DIR}/clickhouse-disks" DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
list(APPEND CLICKHOUSE_BUNDLE clickhouse-disks)
endif ()
if (SPLIT_DEBUG_SYMBOLS)
clickhouse_split_debug_symbols(TARGET clickhouse DESTINATION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${SPLITTED_DEBUG_SYMBOLS_DIR} BINARY_PATH clickhouse)
else()
clickhouse_make_empty_debug_info_for_nfpm(TARGET clickhouse DESTINATION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${SPLITTED_DEBUG_SYMBOLS_DIR})
install (TARGETS clickhouse RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
endif()
add_custom_target (clickhouse-bundle ALL DEPENDS ${CLICKHOUSE_BUNDLE})
if (USE_GDB_ADD_INDEX)
add_custom_command(TARGET clickhouse POST_BUILD COMMAND ${GDB_ADD_INDEX_EXE} clickhouse COMMENT "Adding .gdb-index to clickhouse" VERBATIM)
endif()
if (USE_BINARY_HASH)
add_custom_command(TARGET clickhouse POST_BUILD COMMAND ./clickhouse hash-binary > hash && ${OBJCOPY_PATH} --add-section .clickhouse.hash=hash clickhouse COMMENT "Adding section '.clickhouse.hash' to clickhouse binary" VERBATIM)
endif()
if (SPLIT_DEBUG_SYMBOLS)
clickhouse_split_debug_symbols(TARGET clickhouse DESTINATION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${SPLITTED_DEBUG_SYMBOLS_DIR} BINARY_PATH clickhouse)
else()
clickhouse_make_empty_debug_info_for_nfpm(TARGET clickhouse DESTINATION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${SPLITTED_DEBUG_SYMBOLS_DIR})
install (TARGETS clickhouse RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
endif()
if (ENABLE_TESTS)

View File

@ -352,6 +352,9 @@ try
{
UseSSL use_ssl;
ThreadStatus thread_status;
StackTrace::setShowAddresses(config().getBool("show_addresses_in_stack_traces", true));
setupSignalHandler();
std::cout << std::fixed << std::setprecision(3);

View File

@ -345,6 +345,7 @@ struct Checker
;
#ifndef DISABLE_HARMFUL_ENV_VAR_CHECK
/// NOTE: We will migrate to full static linking or our own dynamic loader to make this code obsolete.
void checkHarmfulEnvironmentVariables(char ** argv)
{
@ -396,6 +397,7 @@ void checkHarmfulEnvironmentVariables(char ** argv)
_exit(error);
}
}
#endif
}
@ -422,7 +424,9 @@ int main(int argc_, char ** argv_)
/// will work only after additional call of this function.
updatePHDRCache();
#ifndef DISABLE_HARMFUL_ENV_VAR_CHECK
checkHarmfulEnvironmentVariables(argv_);
#endif
/// Reset new handler to default (that throws std::bad_alloc)
/// It is needed because LLVM library clobbers it.

View File

@ -46,6 +46,7 @@
#include <boost/algorithm/string.hpp>
#include <boost/container/flat_map.hpp>
#include <Common/TerminalSize.h>
#include <bit>
static const char * documentation = R"(
@ -186,7 +187,7 @@ static UInt64 transform(UInt64 x, UInt64 seed)
if (x == 2 || x == 3)
return x ^ (seed & 1);
size_t num_leading_zeros = __builtin_clzll(x);
size_t num_leading_zeros = std::countl_zero(x);
return feistelNetwork(x, 64 - num_leading_zeros - 1, seed);
}
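The change above swaps the GCC/Clang builtin `__builtin_clzll` for the standard `std::countl_zero` from `<bit>`. A minimal standalone sketch (not ClickHouse code; it assumes a GCC/Clang toolchain for the builtin comparison and C++20 for `<bit>`) showing that the two agree for non-zero inputs, and that `64 - countl_zero(x) - 1` is the index of the highest set bit, which is what bounds the Feistel network width in `transform`:

```cpp
#include <bit>
#include <cassert>
#include <cstdint>

// Standalone sketch (not ClickHouse code): std::countl_zero matches
// __builtin_clzll for non-zero inputs, and 64 - countl_zero(x) - 1 is the
// index of the most significant set bit -- the width the Feistel network in
// transform() is confined to so the obfuscated value stays comparably small.
int main()
{
    const uint64_t samples[] = {4, 1000, (uint64_t(1) << 63) + 5};
    for (uint64_t x : samples)
    {
        int clz_builtin = __builtin_clzll(x);   // GCC/Clang builtin, undefined for x == 0
        int clz_std = std::countl_zero(x);      // C++20, well-defined for all inputs
        assert(clz_builtin == clz_std);

        int msb_index = 64 - clz_std - 1;       // position of the highest set bit
        assert((x >> msb_index) == 1);
    }
    return 0;
}
```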

View File

@ -367,7 +367,7 @@ Poco::Net::SocketAddress Server::socketBindListen(
return address;
}
std::vector<std::string> getListenHosts(const Poco::Util::AbstractConfiguration & config)
Strings getListenHosts(const Poco::Util::AbstractConfiguration & config)
{
auto listen_hosts = DB::getMultipleValuesFromConfig(config, "", "listen_host");
if (listen_hosts.empty())
@ -378,6 +378,16 @@ std::vector<std::string> getListenHosts(const Poco::Util::AbstractConfiguration
return listen_hosts;
}
Strings getInterserverListenHosts(const Poco::Util::AbstractConfiguration & config)
{
auto interserver_listen_hosts = DB::getMultipleValuesFromConfig(config, "", "interserver_listen_host");
if (!interserver_listen_hosts.empty())
return interserver_listen_hosts;
/// Use more general restriction in case of emptiness
return getListenHosts(config);
}
bool getListenTry(const Poco::Util::AbstractConfiguration & config)
{
bool listen_try = config.getBool("listen_try", false);
@ -627,6 +637,8 @@ int Server::main(const std::vector<std::string> & /*args*/)
MainThreadStatus::getInstance();
StackTrace::setShowAddresses(config().getBool("show_addresses_in_stack_traces", true));
registerFunctions();
registerAggregateFunctions();
registerTableFunctions();
@ -1232,6 +1244,7 @@ int Server::main(const std::vector<std::string> & /*args*/)
/* already_loaded = */ false); /// Reload it right now (initial loading)
const auto listen_hosts = getListenHosts(config());
const auto interserver_listen_hosts = getInterserverListenHosts(config());
const auto listen_try = getListenTry(config());
if (config().has("keeper_server"))
@ -1627,7 +1640,7 @@ int Server::main(const std::vector<std::string> & /*args*/)
{
std::lock_guard lock(servers_lock);
createServers(config(), listen_hosts, listen_try, server_pool, async_metrics, servers);
createServers(config(), listen_hosts, interserver_listen_hosts, listen_try, server_pool, async_metrics, servers);
if (servers.empty())
throw Exception(
"No servers started (add valid listen_host and 'tcp_port' or 'http_port' to configuration file.)",
@ -1809,7 +1822,8 @@ int Server::main(const std::vector<std::string> & /*args*/)
void Server::createServers(
Poco::Util::AbstractConfiguration & config,
const std::vector<std::string> & listen_hosts,
const Strings & listen_hosts,
const Strings & interserver_listen_hosts,
bool listen_try,
Poco::ThreadPool & server_pool,
AsynchronousMetrics & async_metrics,
@ -1927,51 +1941,6 @@ void Server::createServers(
#endif
});
/// Interserver IO HTTP
port_name = "interserver_http_port";
createServer(config, listen_host, port_name, listen_try, start_servers, servers, [&](UInt16 port) -> ProtocolServerAdapter
{
Poco::Net::ServerSocket socket;
auto address = socketBindListen(config, socket, listen_host, port);
socket.setReceiveTimeout(settings.http_receive_timeout);
socket.setSendTimeout(settings.http_send_timeout);
return ProtocolServerAdapter(
listen_host,
port_name,
"replica communication (interserver): http://" + address.toString(),
std::make_unique<HTTPServer>(
context(),
createHandlerFactory(*this, async_metrics, "InterserverIOHTTPHandler-factory"),
server_pool,
socket,
http_params));
});
port_name = "interserver_https_port";
createServer(config, listen_host, port_name, listen_try, start_servers, servers, [&](UInt16 port) -> ProtocolServerAdapter
{
#if USE_SSL
Poco::Net::SecureServerSocket socket;
auto address = socketBindListen(config, socket, listen_host, port, /* secure = */ true);
socket.setReceiveTimeout(settings.http_receive_timeout);
socket.setSendTimeout(settings.http_send_timeout);
return ProtocolServerAdapter(
listen_host,
port_name,
"secure replica communication (interserver): https://" + address.toString(),
std::make_unique<HTTPServer>(
context(),
createHandlerFactory(*this, async_metrics, "InterserverIOHTTPSHandler-factory"),
server_pool,
socket,
http_params));
#else
UNUSED(port);
throw Exception{"SSL support for TCP protocol is disabled because Poco library was built without NetSSL support.",
ErrorCodes::SUPPORT_IS_DISABLED};
#endif
});
port_name = "mysql_port";
createServer(config, listen_host, port_name, listen_try, start_servers, servers, [&](UInt16 port) -> ProtocolServerAdapter
{
@ -2030,6 +1999,55 @@ void Server::createServers(
});
}
/// Now iterate over interserver_listen_hosts
for (const auto & interserver_listen_host : interserver_listen_hosts)
{
/// Interserver IO HTTP
const char * port_name = "interserver_http_port";
createServer(config, interserver_listen_host, port_name, listen_try, start_servers, servers, [&](UInt16 port) -> ProtocolServerAdapter
{
Poco::Net::ServerSocket socket;
auto address = socketBindListen(config, socket, interserver_listen_host, port);
socket.setReceiveTimeout(settings.http_receive_timeout);
socket.setSendTimeout(settings.http_send_timeout);
return ProtocolServerAdapter(
interserver_listen_host,
port_name,
"replica communication (interserver): http://" + address.toString(),
std::make_unique<HTTPServer>(
context(),
createHandlerFactory(*this, async_metrics, "InterserverIOHTTPHandler-factory"),
server_pool,
socket,
http_params));
});
port_name = "interserver_https_port";
createServer(config, interserver_listen_host, port_name, listen_try, start_servers, servers, [&](UInt16 port) -> ProtocolServerAdapter
{
#if USE_SSL
Poco::Net::SecureServerSocket socket;
auto address = socketBindListen(config, socket, interserver_listen_host, port, /* secure = */ true);
socket.setReceiveTimeout(settings.http_receive_timeout);
socket.setSendTimeout(settings.http_send_timeout);
return ProtocolServerAdapter(
interserver_listen_host,
port_name,
"secure replica communication (interserver): https://" + address.toString(),
std::make_unique<HTTPServer>(
context(),
createHandlerFactory(*this, async_metrics, "InterserverIOHTTPSHandler-factory"),
server_pool,
socket,
http_params));
#else
UNUSED(port);
throw Exception{"SSL support for TCP protocol is disabled because Poco library was built without NetSSL support.",
ErrorCodes::SUPPORT_IS_DISABLED};
#endif
});
}
}
void Server::updateServers(
@ -2041,6 +2059,7 @@ void Server::updateServers(
Poco::Logger * log = &logger();
const auto listen_hosts = getListenHosts(config);
const auto interserver_listen_hosts = getInterserverListenHosts(config);
const auto listen_try = getListenTry(config);
/// Remove servers once all their connections are closed
@ -2073,7 +2092,7 @@ void Server::updateServers(
}
}
createServers(config, listen_hosts, listen_try, server_pool, async_metrics, servers, /* start_servers= */ true);
createServers(config, listen_hosts, interserver_listen_hosts, listen_try, server_pool, async_metrics, servers, /* start_servers= */ true);
std::erase_if(servers, std::bind_front(check_server, ""));
}

View File

@ -86,7 +86,8 @@ private:
void createServers(
Poco::Util::AbstractConfiguration & config,
const std::vector<std::string> & listen_hosts,
const Strings & listen_hosts,
const Strings & interserver_listen_hosts,
bool listen_try,
Poco::ThreadPool & server_pool,
AsynchronousMetrics & async_metrics,

View File

@ -188,6 +188,10 @@
<listen_host>127.0.0.1</listen_host>
-->
<!-- <interserver_listen_host>::</interserver_listen_host> -->
<!-- Listen host for communication between replicas. Used for data exchange -->
<!-- Default values - equal to listen_host -->
<!-- Don't exit if IPv6 or IPv4 networks are unavailable while trying to listen. -->
<!-- <listen_try>0</listen_try> -->
@ -1386,4 +1390,13 @@
<lru_cache_size>268435456</lru_cache_size>
<continue_if_corrupted>true</continue_if_corrupted>
</merge_tree_metadata_cache-->
<!-- This allows to disable exposing addresses in stack traces for security reasons.
Please be aware that it does not improve security much, but makes debugging much harder.
The addresses that are small offsets from zero will be displayed nevertheless to show nullptr dereferences.
Regardless of this configuration, the addresses are visible in the system.stack_trace and system.trace_log tables
if the user has access to these tables.
I don't recommend changing this setting.
-->
<show_addresses_in_stack_traces>false</show_addresses_in_stack_traces>
</clickhouse>

View File

@ -144,8 +144,8 @@ endif ()
list (APPEND clickhouse_common_io_sources ${CONFIG_BUILD})
list (APPEND clickhouse_common_io_headers ${CONFIG_VERSION} ${CONFIG_COMMON})
list (APPEND dbms_sources Functions/IFunction.cpp Functions/FunctionFactory.cpp Functions/FunctionHelpers.cpp Functions/extractTimeZoneFromFunctionArguments.cpp Functions/replicate.cpp Functions/FunctionsLogical.cpp)
list (APPEND dbms_headers Functions/IFunction.h Functions/FunctionFactory.h Functions/FunctionHelpers.h Functions/extractTimeZoneFromFunctionArguments.h Functions/replicate.h Functions/FunctionsLogical.h)
list (APPEND dbms_sources Functions/IFunction.cpp Functions/FunctionFactory.cpp Functions/FunctionHelpers.cpp Functions/extractTimeZoneFromFunctionArguments.cpp Functions/FunctionsLogical.cpp)
list (APPEND dbms_headers Functions/IFunction.h Functions/FunctionFactory.h Functions/FunctionHelpers.h Functions/extractTimeZoneFromFunctionArguments.h Functions/FunctionsLogical.h)
list (APPEND dbms_sources
AggregateFunctions/IAggregateFunction.cpp

View File

@ -583,9 +583,14 @@ try
if (has_vertical_output_suffix)
current_format = "Vertical";
/// It is not clear how to write progress intermixed with data with parallel formatting.
bool logs_into_stdout = server_logs_file == "-";
bool extras_into_stdout = need_render_progress || logs_into_stdout;
bool select_only_into_file = select_into_file && !select_into_file_and_stdout;
/// It is not clear how to write progress and logs
/// intermixed with data with parallel formatting.
/// It may increase code complexity significantly.
if (!need_render_progress || (select_into_file && !select_into_file_and_stdout))
if (!extras_into_stdout || select_only_into_file)
output_format = global_context->getOutputFormatParallelIfPossible(
current_format, out_file_buf ? *out_file_buf : *out_buf, block);
else

View File

@ -298,7 +298,7 @@ ColumnPtr ColumnDecimal<T>::filter(const IColumn::Filter & filt, ssize_t result_
{
while (mask)
{
size_t index = __builtin_ctzll(mask);
size_t index = std::countr_zero(mask);
res_data.push_back(data_pos[index]);
#ifdef __BMI__
mask = _blsr_u64(mask);
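For context, the filter loops in these columns walk a 64-bit mask of selected rows: `std::countr_zero` (replacing `__builtin_ctzll`) yields the index of the lowest set bit, and `_blsr_u64` (or the portable `mask &= mask - 1`) clears it. A minimal sketch of that idiom, assuming C++20; `setBitIndices` is an illustrative helper, not part of the ClickHouse code:

```cpp
#include <bit>
#include <cstdint>
#include <cstdio>
#include <vector>

// Minimal sketch of the filter loop's bit-iteration idiom: walk all set bits
// of a 64-bit mask from lowest to highest, using std::countr_zero to find the
// next index and mask &= mask - 1 to clear it (the portable equivalent of the
// BMI _blsr_u64 intrinsic used under #ifdef __BMI__).
std::vector<size_t> setBitIndices(uint64_t mask)
{
    std::vector<size_t> indices;
    while (mask)
    {
        size_t index = std::countr_zero(mask);  // position of the lowest set bit
        indices.push_back(index);
        mask &= mask - 1;                       // clear the lowest set bit
    }
    return indices;
}

int main()
{
    // 0b10100110 -> rows 1, 2, 5, 7 pass the filter mask.
    for (size_t i : setBitIndices(0b10100110))
        std::printf("%zu ", i);
    std::printf("\n");
    return 0;
}
```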

View File

@ -240,7 +240,7 @@ ColumnPtr ColumnFixedString::filter(const IColumn::Filter & filt, ssize_t result
size_t res_chars_size = res->chars.size();
while (mask)
{
size_t index = __builtin_ctzll(mask);
size_t index = std::countr_zero(mask);
res->chars.resize(res_chars_size + n);
memcpySmallAllowReadWriteOverflow15(&res->chars[res_chars_size], data_pos + index * n, n);
res_chars_size += n;

View File

@ -508,7 +508,7 @@ ColumnPtr ColumnVector<T>::filter(const IColumn::Filter & filt, ssize_t result_s
{
while (mask)
{
size_t index = __builtin_ctzll(mask);
size_t index = std::countr_zero(mask);
res_data.push_back(data_pos[index]);
#ifdef __BMI__
mask = _blsr_u64(mask);

View File

@ -2,13 +2,14 @@
#include <Columns/ColumnVector.h>
#include <Common/typeid_cast.h>
#include <Common/HashTable/HashSet.h>
#include <bit>
#include "ColumnsCommon.h"
namespace DB
{
#if defined(__SSE2__) && defined(__POPCNT__)
#if defined(__SSE2__)
/// Transform 64-byte mask to 64-bit mask.
static UInt64 toBits64(const Int8 * bytes64)
{
@ -41,11 +42,11 @@ size_t countBytesInFilter(const UInt8 * filt, size_t start, size_t end)
const Int8 * end_pos = pos + (end - start);
#if defined(__SSE2__) && defined(__POPCNT__)
#if defined(__SSE2__)
const Int8 * end_pos64 = pos + (end - start) / 64 * 64;
for (; pos < end_pos64; pos += 64)
count += __builtin_popcountll(toBits64(pos));
count += std::popcount(toBits64(pos));
/// TODO Add duff device for tail?
#endif
@ -74,11 +75,11 @@ size_t countBytesInFilterWithNull(const IColumn::Filter & filt, const UInt8 * nu
const Int8 * pos2 = reinterpret_cast<const Int8 *>(null_map) + start;
const Int8 * end_pos = pos + (end - start);
#if defined(__SSE2__) && defined(__POPCNT__)
#if defined(__SSE2__)
const Int8 * end_pos64 = pos + (end - start) / 64 * 64;
for (; pos < end_pos64; pos += 64, pos2 += 64)
count += __builtin_popcountll(toBits64(pos) & ~toBits64(pos2));
count += std::popcount(toBits64(pos) & ~toBits64(pos2));
/// TODO Add duff device for tail?
#endif
@ -259,7 +260,7 @@ namespace
{
while (mask)
{
size_t index = __builtin_ctzll(mask);
size_t index = std::countr_zero(mask);
copy_array(offsets_pos + index);
#ifdef __BMI__
mask = _blsr_u64(mask);
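The `__POPCNT__` guard can be dropped here because `std::popcount` is always available in C++20 and the compiler emits the `popcnt` instruction when the target supports it. A hedged sketch of the counting idea, with a scalar packer standing in for the SSE2 `toBits64` (the helper name `packBytesToBits` is illustrative, not from the source):

```cpp
#include <bit>
#include <cstdint>
#include <cstdio>

// Sketch of the countBytesInFilter idea with a scalar packer instead of the
// SSE2 toBits64(): pack 64 filter bytes into one 64-bit mask (bit i set when
// byte i is non-zero), then count set bits with std::popcount, which replaces
// __builtin_popcountll and no longer needs a __POPCNT__ compile-time guard.
static uint64_t packBytesToBits(const uint8_t * bytes64)
{
    uint64_t mask = 0;
    for (int i = 0; i < 64; ++i)
        mask |= uint64_t(bytes64[i] != 0) << i;
    return mask;
}

int main()
{
    uint8_t filt[64] = {};
    filt[3] = filt[10] = filt[63] = 1;   // three rows selected

    uint64_t mask = packBytesToBits(filt);
    std::printf("selected rows: %d\n", std::popcount(mask));  // prints 3
    return 0;
}
```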

View File

@ -36,7 +36,7 @@ inline UInt64 bytes64MaskToBits64Mask(const UInt8 * bytes64)
_mm256_loadu_si256(reinterpret_cast<const __m256i *>(bytes64)), zero32))) & 0xffffffff)
| (static_cast<UInt64>(_mm256_movemask_epi8(_mm256_cmpeq_epi8(
_mm256_loadu_si256(reinterpret_cast<const __m256i *>(bytes64+32)), zero32))) << 32);
#elif defined(__SSE2__) && defined(__POPCNT__)
#elif defined(__SSE2__)
static const __m128i zero16 = _mm_setzero_si128();
UInt64 res =
(static_cast<UInt64>(_mm_movemask_epi8(_mm_cmpeq_epi8(

View File

@ -3,6 +3,7 @@
#include <Common/HashTable/HashMap.h>
#include <Common/HashTable/HashTable.h>
#include <bit>
#include <new>
#include <variant>
@ -21,17 +22,17 @@ struct StringKey24
inline StringRef ALWAYS_INLINE toStringRef(const StringKey8 & n)
{
assert(n != 0);
return {reinterpret_cast<const char *>(&n), 8ul - (__builtin_clzll(n) >> 3)};
return {reinterpret_cast<const char *>(&n), 8ul - (std::countl_zero(n) >> 3)};
}
inline StringRef ALWAYS_INLINE toStringRef(const StringKey16 & n)
{
assert(n.items[1] != 0);
return {reinterpret_cast<const char *>(&n), 16ul - (__builtin_clzll(n.items[1]) >> 3)};
return {reinterpret_cast<const char *>(&n), 16ul - (std::countl_zero(n.items[1]) >> 3)};
}
inline StringRef ALWAYS_INLINE toStringRef(const StringKey24 & n)
{
assert(n.c != 0);
return {reinterpret_cast<const char *>(&n), 24ul - (__builtin_clzll(n.c) >> 3)};
return {reinterpret_cast<const char *>(&n), 24ul - (std::countl_zero(n.c) >> 3)};
}
struct StringHashTableHash
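To make the length computation above concrete: the short string keys are packed into zero-initialized integers, so on a little-endian machine the unused high bytes are zero and `std::countl_zero(n) >> 3` counts them. A small sketch under that little-endian assumption (standalone, C++20, not ClickHouse code):

```cpp
#include <bit>
#include <cassert>
#include <cstdint>
#include <cstring>

// Sketch of the StringKey8 length trick on a little-endian machine: a short
// string is memcpy'd into a zero-initialized UInt64, so its unused high bytes
// are zero. std::countl_zero(n) >> 3 counts those zero padding bytes, and
// 8 - padding recovers the original string length (the key must be non-zero,
// hence the assert in toStringRef).
int main()
{
    static_assert(std::endian::native == std::endian::little, "sketch assumes little-endian");

    const char * s = "abc";               // 3 meaningful bytes
    uint64_t key = 0;
    std::memcpy(&key, s, 3);              // low 3 bytes used, high 5 bytes stay zero

    assert(key != 0);
    size_t len = 8ul - (std::countl_zero(key) >> 3);
    assert(len == 3);
    return 0;
}
```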

View File

@ -11,6 +11,7 @@
#include <IO/WriteHelpers.h>
#include <Core/Defines.h>
#include <bit>
#include <cmath>
#include <cstring>
@ -205,7 +206,7 @@ struct TrailingZerosCounter<UInt32>
{
static int apply(UInt32 val)
{
return __builtin_ctz(val);
return std::countr_zero(val);
}
};
@ -214,7 +215,7 @@ struct TrailingZerosCounter<UInt64>
{
static int apply(UInt64 val)
{
return __builtin_ctzll(val);
return std::countr_zero(val);
}
};
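One behavioural note on this substitution: `std::countr_zero` agrees with `__builtin_ctz`/`__builtin_ctzll` for non-zero inputs and, unlike the builtins, is also defined for zero, where it returns the bit width. A minimal sketch, assuming C++20:

```cpp
#include <bit>
#include <cassert>
#include <cstdint>

// Sketch of what the TrailingZerosCounter specializations now rely on:
// std::countr_zero matches __builtin_ctz/__builtin_ctzll for non-zero inputs,
// and additionally has defined behaviour for 0 (it returns the bit width),
// whereas the builtins are undefined there.
int main()
{
    uint32_t a = 0b1010'0000;             // 5 trailing zeros
    uint64_t b = uint64_t(1) << 40;       // 40 trailing zeros

    assert(std::countr_zero(a) == 5);
    assert(std::countr_zero(b) == 40);

    // Defined for zero, unlike __builtin_ctz(0):
    assert(std::countr_zero(uint32_t(0)) == 32);
    assert(std::countr_zero(uint64_t(0)) == 64);
    return 0;
}
```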

View File

@ -5,6 +5,7 @@
#include <Common/formatIPv6.h>
#include <cstring>
#include <bit>
namespace DB
@ -89,7 +90,7 @@ bool matchIPv6Subnet(const uint8_t * addr, const uint8_t * cidr_addr, UInt8 pref
if (mask)
{
auto offset = __builtin_ctz(mask);
auto offset = std::countr_zero(mask);
if (prefix / 8 != offset)
return prefix / 8 < offset;
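For orientation, the surrounding function derives a bitmask with one bit per byte position where the address and the CIDR base differ; `std::countr_zero` then gives the index of the first differing byte, which is compared against the number of whole prefix bytes. A simplified scalar sketch of that idea (the ClickHouse code builds the mask with SSE; `firstDifferingByte` is an illustrative helper, not the real function):

```cpp
#include <bit>
#include <cstdint>
#include <cstdio>

// Sketch of the idea behind the matchIPv6Subnet change: build a bitmask with
// one bit per byte position where two 16-byte addresses differ, then use
// std::countr_zero (formerly __builtin_ctz) to get the index of the first
// differing byte. The subnet check only needs to compare that index against
// the number of whole prefix bytes.
static unsigned firstDifferingByte(const uint8_t * a, const uint8_t * b)
{
    uint32_t mask = 0;
    for (int i = 0; i < 16; ++i)
        mask |= uint32_t(a[i] != b[i]) << i;     // bit i set -> byte i differs

    return mask ? std::countr_zero(mask) : 16;   // 16 means "identical"
}

int main()
{
    uint8_t addr[16] = {0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00,
                        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01};
    uint8_t cidr[16] = {0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00,
                        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};

    std::printf("first differing byte: %u\n", firstDifferingByte(addr, cidr));  // 15
    return 0;
}
```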

View File

@ -25,6 +25,10 @@
M(WriteBufferFromFileDescriptorWrite, "Number of writes (write/pwrite) to a file descriptor. Does not include sockets.") \
M(WriteBufferFromFileDescriptorWriteFailed, "Number of times the write (write/pwrite) to a file descriptor have failed.") \
M(WriteBufferFromFileDescriptorWriteBytes, "Number of bytes written to file descriptors. If the file is compressed, this will show compressed data size.") \
M(FileSync, "Number of times the F_FULLFSYNC/fsync/fdatasync function was called for files.") \
M(DirectorySync, "Number of times the F_FULLFSYNC/fsync/fdatasync function was called for directories.") \
M(FileSyncElapsedMicroseconds, "Total time spent waiting for F_FULLFSYNC/fsync/fdatasync syscall for files.") \
M(DirectorySyncElapsedMicroseconds, "Total time spent waiting for F_FULLFSYNC/fsync/fdatasync syscall for directories.") \
M(ReadCompressedBytes, "Number of bytes (the number of bytes before decompression) read from compressed sources (files, network).") \
M(CompressedReadBufferBlocks, "Number of compressed blocks (the blocks of data that are compressed independent of each other) read from compressed sources (files, network).") \
M(CompressedReadBufferBytes, "Number of uncompressed bytes (the number of bytes after decompression) read from compressed sources (files, network).") \

View File

@ -78,7 +78,7 @@ private:
constexpr uint64_t nextAlphaSize(uint64_t x)
{
constexpr uint64_t alpha_map_elements_per_counter = 6;
return 1ULL << (sizeof(uint64_t) * 8 - __builtin_clzll(x * alpha_map_elements_per_counter));
return 1ULL << (sizeof(uint64_t) * 8 - std::countl_zero(x * alpha_map_elements_per_counter));
}
public:
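The expression above computes the smallest power of two strictly greater than x * 6; for x = 10 the product is 60, std::countl_zero(60) is 58, 64 - 58 = 6 and 1 << 6 = 64. Restated standalone for illustration:

#include <bit>
#include <cstdint>

// Same formula as nextAlphaSize above, taken out of the class for illustration.
constexpr uint64_t next_alpha_size(uint64_t x)
{
    constexpr uint64_t alpha_map_elements_per_counter = 6;
    return 1ULL << (sizeof(uint64_t) * 8 - std::countl_zero(x * alpha_map_elements_per_counter));
}

static_assert(next_alpha_size(10) == 64);     // 10 * 6 = 60  -> next power of two above it
static_assert(next_alpha_size(100) == 1024);  // 100 * 6 = 600 -> 1024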

View File

@ -1,6 +1,5 @@
#include <Common/StackTrace.h>
#include <Core/Defines.h>
#include <Common/Dwarf.h>
#include <Common/Elf.h>
#include <Common/SymbolIndex.h>
@ -8,6 +7,7 @@
#include <base/CachedFn.h>
#include <base/demangle.h>
#include <atomic>
#include <cstring>
#include <filesystem>
#include <sstream>
@ -19,6 +19,32 @@
# include <libunwind.h>
#endif
namespace
{
/// Currently this variable is set up once on server startup.
/// But we use atomic just in case, so it is possible to be modified at runtime.
std::atomic<bool> show_addresses = true;
bool shouldShowAddress(const void * addr)
{
/// If the address is less than 4096, most likely it is a nullptr dereference with offset,
/// and showing this offset is secure nevertheless.
/// NOTE: 4096 is the page size on x86 and it can be different on other systems,
/// but for the purpose of this branch, it does not matter.
if (reinterpret_cast<uintptr_t>(addr) < 4096)
return true;
return show_addresses.load(std::memory_order_relaxed);
}
}
void StackTrace::setShowAddresses(bool show)
{
show_addresses.store(show, std::memory_order_relaxed);
}
std::string signalToErrorMessage(int sig, const siginfo_t & info, [[maybe_unused]] const ucontext_t & context)
{
std::stringstream error; // STYLE_CHECK_ALLOW_STD_STRING_STREAM
@ -30,7 +56,7 @@ std::string signalToErrorMessage(int sig, const siginfo_t & info, [[maybe_unused
/// Print info about address and reason.
if (nullptr == info.si_addr)
error << "Address: NULL pointer.";
else
else if (shouldShowAddress(info.si_addr))
error << "Address: " << info.si_addr;
#if defined(__x86_64__) && !defined(OS_FREEBSD) && !defined(OS_DARWIN) && !defined(__arm__) && !defined(__powerpc__)
@ -372,7 +398,9 @@ static void toStringEveryLineImpl(
else
out << "?";
out << " @ " << physical_addr;
if (shouldShowAddress(physical_addr))
out << " @ " << physical_addr;
out << " in " << (object ? object->name : "?");
for (size_t j = 0; j < inline_frames.size(); ++j)
@ -393,10 +421,13 @@ static void toStringEveryLineImpl(
for (size_t i = offset; i < size; ++i)
{
const void * addr = frame_pointers[i];
out << i << ". " << addr;
if (shouldShowAddress(addr))
{
out << i << ". " << addr;
callback(out.str());
out.str({});
callback(out.str());
out.str({});
}
}
#endif
}

View File

@ -67,6 +67,11 @@ public:
void toStringEveryLine(std::function<void(const std::string &)> callback) const;
/// Displaying the addresses can be disabled for security reasons.
/// If you turn off addresses, it will be more secure, but we will be unable to help you with debugging.
/// Please note: addresses are also available in the system.stack_trace and system.trace_log tables.
static void setShowAddresses(bool show);
protected:
void tryCapture();
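A minimal usage sketch for the new setShowAddresses hook; per the comments above it is meant to be called once at server startup. The configuration key name below is an assumption for illustration, not something introduced by this diff:

#include <Common/StackTrace.h>
#include <Poco/Util/AbstractConfiguration.h>

// Hypothetical startup hook; "show_addresses_in_stack_traces" is an assumed config key.
void applyStackTraceSettings(const Poco::Util::AbstractConfiguration & config)
{
    StackTrace::setShowAddresses(config.getBool("show_addresses_in_stack_traces", true));
}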

View File

@ -2,6 +2,7 @@
#include <Common/StringUtils/StringUtils.h>
#include <widechar_width.h>
#include <bit>
namespace DB
@ -124,7 +125,7 @@ size_t computeWidthImpl(const UInt8 * data, size_t size, size_t prefix, size_t l
if (non_regular_width_mask)
{
auto num_regular_chars = __builtin_ctz(non_regular_width_mask);
auto num_regular_chars = std::countr_zero(non_regular_width_mask);
width += num_regular_chars;
i += num_regular_chars;
break;

View File

@ -83,7 +83,7 @@ inline size_t countCodePoints(const UInt8 * data, size_t size)
const auto threshold = vdupq_n_s8(0xBF);
for (; data < src_end_sse; data += bytes_sse)
res += __builtin_popcountll(get_nibble_mask(vcgtq_s8(vld1q_s8(reinterpret_cast<const int8_t *>(data)), threshold)));
res += std::popcount(get_nibble_mask(vcgtq_s8(vld1q_s8(reinterpret_cast<const int8_t *>(data)), threshold)));
res >>= 2;
#endif

View File

@ -18,7 +18,6 @@
#cmakedefine01 USE_DATASKETCHES
#cmakedefine01 USE_YAML_CPP
#cmakedefine01 CLICKHOUSE_SPLIT_BINARY
#cmakedefine01 USE_BZIP2
#cmakedefine01 USE_MINIZIP
#cmakedefine01 USE_SNAPPY

View File

@ -1,5 +1,6 @@
#include <iostream>
#include <string>
#include <bit>
#include <fmt/format.h>
@ -561,7 +562,7 @@ int main(int argc, char ** argv)
/// Fill source data
for (size_t i = 0; i < size; ++i)
{
keys[i] = __builtin_ctz(i + 1); /// Make keys to have just slightly more realistic distribution.
keys[i] = std::countr_zero(i + 1); /// Make keys to have just slightly more realistic distribution.
values[i] = 1234.5; /// The distribution of values does not affect execution speed.
}

View File

@ -1,6 +1,7 @@
#pragma once
#include <algorithm>
#include <bit>
#include <cstdint>
#include <Core/Defines.h>
@ -50,7 +51,7 @@ inline int memcmpSmallAllowOverflow15(const Char * a, size_t a_size, const Char
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= min_size)
break;
@ -82,7 +83,7 @@ inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_siz
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= min_size)
break;
@ -123,7 +124,7 @@ inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_siz
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= max_size)
return 0;
@ -150,7 +151,7 @@ inline int memcmpSmallAllowOverflow15(const Char * a, const Char * b, size_t siz
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= size)
return 0;
@ -180,7 +181,7 @@ inline bool memequalSmallAllowOverflow15(const Char * a, size_t a_size, const Ch
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
return offset >= a_size;
}
}
@ -203,7 +204,7 @@ inline int memcmpSmallMultipleOf16(const Char * a, const Char * b, size_t size)
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
return detail::cmp(a[offset], b[offset]);
}
}
@ -222,7 +223,7 @@ inline int memcmp16(const Char * a, const Char * b)
if (mask)
{
auto offset = __builtin_ctz(mask);
auto offset = std::countr_zero(mask);
return detail::cmp(a[offset], b[offset]);
}
@ -252,7 +253,7 @@ inline bool memoryIsZeroSmallAllowOverflow15(const void * data, size_t size)
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
return offset >= size;
}
}
@ -285,7 +286,7 @@ inline int memcmpSmallAllowOverflow15(const Char * a, size_t a_size, const Char
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= min_size)
break;
@ -317,7 +318,7 @@ inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_siz
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= min_size)
break;
@ -359,7 +360,7 @@ inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_siz
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= max_size)
return 0;
@ -386,7 +387,7 @@ inline int memcmpSmallAllowOverflow15(const Char * a, const Char * b, size_t siz
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
if (offset >= size)
return 0;
@ -416,7 +417,7 @@ inline bool memequalSmallAllowOverflow15(const Char * a, size_t a_size, const Ch
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
return offset >= a_size;
}
}
@ -439,7 +440,7 @@ inline int memcmpSmallMultipleOf16(const Char * a, const Char * b, size_t size)
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
return detail::cmp(a[offset], b[offset]);
}
}
@ -459,7 +460,7 @@ inline int memcmp16(const Char * a, const Char * b)
if (mask)
{
auto offset = __builtin_ctz(mask);
auto offset = std::countr_zero(mask);
return detail::cmp(a[offset], b[offset]);
}
@ -490,7 +491,7 @@ inline bool memoryIsZeroSmallAllowOverflow15(const void * data, size_t size)
if (mask)
{
offset += __builtin_ctz(mask);
offset += std::countr_zero(mask);
return offset >= size;
}
}
@ -523,7 +524,7 @@ inline int memcmpSmallAllowOverflow15(const Char * a, size_t a_size, const Char
if (mask)
{
offset += __builtin_ctzll(mask) >> 2;
offset += std::countr_zero(mask) >> 2;
if (offset >= min_size)
break;
@ -548,7 +549,7 @@ inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_siz
if (mask)
{
offset += __builtin_ctzll(mask) >> 2;
offset += std::countr_zero(mask) >> 2;
if (offset >= min_size)
break;
@ -589,7 +590,7 @@ inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_siz
if (mask)
{
offset += __builtin_ctzll(mask) >> 2;
offset += std::countr_zero(mask) >> 2;
if (offset >= max_size)
return 0;
@ -611,7 +612,7 @@ inline int memcmpSmallAllowOverflow15(const Char * a, const Char * b, size_t siz
if (mask)
{
offset += __builtin_ctzll(mask) >> 2;
offset += std::countr_zero(mask) >> 2;
if (offset >= size)
return 0;
@ -637,7 +638,7 @@ inline bool memequalSmallAllowOverflow15(const Char * a, size_t a_size, const Ch
if (mask)
{
offset += __builtin_ctzll(mask) >> 2;
offset += std::countr_zero(mask) >> 2;
return offset >= a_size;
}
}
@ -656,7 +657,7 @@ inline int memcmpSmallMultipleOf16(const Char * a, const Char * b, size_t size)
if (mask)
{
offset += __builtin_ctzll(mask) >> 2;
offset += std::countr_zero(mask) >> 2;
return detail::cmp(a[offset], b[offset]);
}
}
@ -672,7 +673,7 @@ inline int memcmp16(const Char * a, const Char * b)
mask = ~mask;
if (mask)
{
auto offset = __builtin_ctzll(mask) >> 2;
auto offset = std::countr_zero(mask) >> 2;
return detail::cmp(a[offset], b[offset]);
}
return 0;
@ -694,7 +695,7 @@ inline bool memoryIsZeroSmallAllowOverflow15(const void * data, size_t size)
if (mask)
{
offset += __builtin_ctzll(mask) >> 2;
offset += std::countr_zero(mask) >> 2;
return offset >= size;
}
}
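One detail worth spelling out for the ARM paths above: judging by the get_nibble_mask naming used elsewhere in this commit, the NEON mask carries four identical bits per byte lane, so the bit index returned by std::countr_zero has to be divided by four (the >> 2) to become a byte offset, unlike the SSE paths. A small sketch under that assumption:

#include <bit>
#include <cstdint>
#include <cassert>

int main()
{
    // Assume byte lane 4 is the first mismatching byte; with 4 mask bits per lane that sets bits 16..19.
    uint64_t nibble_mask = 0xFULL << (4 * 4);
    size_t first_diff_byte = std::countr_zero(nibble_mask) >> 2;   // 16 >> 2 == 4
    assert(first_diff_byte == 4);
}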

View File

@ -0,0 +1,12 @@
#include <Common/register_objects.h>
namespace DB
{
FunctionRegisterMap & FunctionRegisterMap::instance()
{
static FunctionRegisterMap map;
return map;
}
}

View File

@ -0,0 +1,33 @@
#pragma once
#include <string_view>
#include <unordered_map>
namespace DB
{
class FunctionFactory;
using FunctionRegisterFunctionPtr = void (*)(::DB::FunctionFactory &);
struct FunctionRegisterMap : public std::unordered_map<std::string_view, FunctionRegisterFunctionPtr>
{
static FunctionRegisterMap & instance();
};
struct FunctionRegister
{
FunctionRegister(std::string_view name, FunctionRegisterFunctionPtr func_ptr)
{
FunctionRegisterMap::instance().emplace(std::move(name), func_ptr);
}
};
}
#define REGISTER_FUNCTION_IMPL(fn, func_name, register_name) \
void func_name(::DB::FunctionFactory & factory); \
static ::DB::FunctionRegister register_name(#fn, func_name); \
void func_name(::DB::FunctionFactory & factory)
#define REGISTER_FUNCTION(fn) REGISTER_FUNCTION_IMPL(fn, registerFunction##fn, REGISTER_FUNCTION_##fn)
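For reference, hand-expanding the macro for a hypothetical function Foo (both the name and the FunctionFoo class are illustrative only) shows how the former registerFunctionFoo free functions become self-registering translation units:

#include <Common/register_objects.h>
#include <Functions/FunctionFactory.h>

REGISTER_FUNCTION(Foo)
{
    factory.registerFunction<FunctionFoo>();   // FunctionFoo is assumed to exist elsewhere
}

// ...which expands to roughly:
//
//   void registerFunctionFoo(::DB::FunctionFactory & factory);
//   static ::DB::FunctionRegister REGISTER_FUNCTION_Foo("Foo", registerFunctionFoo);
//   void registerFunctionFoo(::DB::FunctionFactory & factory)
//   {
//       factory.registerFunction<FunctionFoo>();
//   }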

View File

@ -1,5 +1,6 @@
#include "CompressedReadBufferBase.h"
#include <bit>
#include <cstring>
#include <cassert>
#include <city.h>
@ -93,8 +94,8 @@ static void validateChecksum(char * data, size_t size, const Checksum expected_c
}
/// Check if the difference caused by single bit flip in stored checksum.
size_t difference = __builtin_popcountll(expected_checksum.first ^ calculated_checksum.first)
+ __builtin_popcountll(expected_checksum.second ^ calculated_checksum.second);
size_t difference = std::popcount(expected_checksum.first ^ calculated_checksum.first)
+ std::popcount(expected_checksum.second ^ calculated_checksum.second);
if (difference == 1)
{
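The arithmetic behind the check above, restated as a standalone sketch: the stored and recomputed 128-bit checksums are XOR-ed half by half, and a combined population count of exactly 1 means the mismatch can be explained by a single flipped bit in the stored checksum rather than by corrupted data.

#include <bit>
#include <cstdint>
#include <cassert>

int main()
{
    // Hypothetical checksum halves: the stored value differs from the recomputed one in a single bit.
    uint64_t expected_first = 0b1011, calculated_first = 0b1001;
    uint64_t expected_second = 0,     calculated_second = 0;

    size_t difference = std::popcount(expected_first ^ calculated_first)
                      + std::popcount(expected_second ^ calculated_second);
    assert(difference == 1);   // -> reported as a likely bit flip in the checksum itself
}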

View File

@ -8,6 +8,7 @@
#include <Parsers/ASTFunction.h>
#include <IO/WriteHelpers.h>
#include <Core/Types.h>
#include <bit>
namespace DB
@ -413,7 +414,7 @@ UInt32 getValuableBitsNumber(UInt64 min, UInt64 max)
{
UInt64 diff_bits = min ^ max;
if (diff_bits)
return 64 - __builtin_clzll(diff_bits);
return 64 - std::countl_zero(diff_bits);
return 0;
}
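A worked example of getValuableBitsNumber above: for min = 16 and max = 31 the XOR is 15 (binary 1111), std::countl_zero(15) is 60, and 64 - 60 = 4, i.e. only the low 4 bits can differ anywhere inside [16, 31]. Restated standalone for illustration:

#include <bit>
#include <cstdint>

constexpr uint32_t valuable_bits(uint64_t min, uint64_t max)
{
    uint64_t diff_bits = min ^ max;
    return diff_bits ? 64 - std::countl_zero(diff_bits) : 0;
}

static_assert(valuable_bits(16, 31) == 4);
static_assert(valuable_bits(1000, 1000) == 0);   // identical bounds need no varying bits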

View File

@ -34,6 +34,8 @@ namespace ProfileEvents
extern const Event AIOWriteBytes;
extern const Event AIORead;
extern const Event AIOReadBytes;
extern const Event FileSync;
extern const Event FileSyncElapsedMicroseconds;
}
namespace DB
@ -544,6 +546,9 @@ public:
file_path,
std::to_string(bytes_written));
ProfileEvents::increment(ProfileEvents::FileSync);
Stopwatch watch;
#if defined(OS_DARWIN)
if (::fsync(file.fd) < 0)
throwFromErrnoWithPath("Cannot fsync " + file_path, file_path, ErrorCodes::CANNOT_FSYNC);
@ -551,6 +556,7 @@ public:
if (::fdatasync(file.fd) < 0)
throwFromErrnoWithPath("Cannot fdatasync " + file_path, file_path, ErrorCodes::CANNOT_FSYNC);
#endif
ProfileEvents::increment(ProfileEvents::FileSyncElapsedMicroseconds, watch.elapsedMicroseconds());
current_block_index += buffer_size_in_blocks;
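The same increment / Stopwatch / sync / elapsed pattern is applied to files here and to directories below. A stripped-down sketch using plain POSIX and std::chrono instead of the ClickHouse helpers (names below are not from the diff, and only the Linux fdatasync path is shown; Darwin would use fsync/F_FULLFSYNC as above):

#include <chrono>
#include <unistd.h>

// Returns the microseconds spent inside fdatasync; a caller would bump FileSync by one
// and feed the returned value into FileSyncElapsedMicroseconds, mirroring the diff above.
long long timed_fdatasync(int fd)
{
    auto start = std::chrono::steady_clock::now();
    ::fdatasync(fd);   // error handling omitted in this sketch
    auto end = std::chrono::steady_clock::now();
    return std::chrono::duration_cast<std::chrono::microseconds>(end - start).count();
}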

View File

@ -1,6 +1,8 @@
#include <Disks/LocalDirectorySyncGuard.h>
#include <Common/ProfileEvents.h>
#include <Common/Exception.h>
#include <Disks/IDisk.h>
#include <Common/Stopwatch.h>
#include <fcntl.h> // O_RDWR
/// OSX does not have O_DIRECTORY
@ -8,6 +10,12 @@
#define O_DIRECTORY O_RDWR
#endif
namespace ProfileEvents
{
extern const Event DirectorySync;
extern const Event DirectorySyncElapsedMicroseconds;
}
namespace DB
{
@ -29,8 +37,12 @@ LocalDirectorySyncGuard::LocalDirectorySyncGuard(const String & full_path)
LocalDirectorySyncGuard::~LocalDirectorySyncGuard()
{
ProfileEvents::increment(ProfileEvents::DirectorySync);
try
{
Stopwatch watch;
#if defined(OS_DARWIN)
if (fcntl(fd, F_FULLFSYNC, 0))
throwFromErrno("Cannot fcntl(F_FULLFSYNC)", ErrorCodes::CANNOT_FSYNC);
@ -40,6 +52,8 @@ LocalDirectorySyncGuard::~LocalDirectorySyncGuard()
#endif
if (-1 == ::close(fd))
throw Exception("Cannot close file", ErrorCodes::CANNOT_CLOSE_FILE);
ProfileEvents::increment(ProfileEvents::DirectorySyncElapsedMicroseconds, watch.elapsedMicroseconds());
}
catch (...)
{

View File

@ -529,7 +529,12 @@ namespace JSONUtils
writeObjectStart(out, 2);
writeTitle<true>("name", out, 3);
writeDoubleQuoted(fields[i].name, out);
/// The field names are pre-escaped to be put into a JSON string literal.
writeChar('"', out);
writeString(fields[i].name, out);
writeChar('"', out);
writeFieldDelimiter(out);
writeTitle<true>("type", out, 3);
writeJSONString(fields[i].type->getName(), out, settings);

View File

@ -6,13 +6,14 @@ add_subdirectory(divide)
include("${ClickHouse_SOURCE_DIR}/cmake/dbms_glob_sources.cmake")
add_headers_and_sources(clickhouse_functions .)
list(REMOVE_ITEM clickhouse_functions_sources IFunction.cpp FunctionFactory.cpp FunctionHelpers.cpp)
list(REMOVE_ITEM clickhouse_functions_headers IFunction.h FunctionFactory.h FunctionHelpers.h)
list(REMOVE_ITEM clickhouse_functions_sources IFunction.cpp FunctionFactory.cpp FunctionHelpers.cpp extractTimeZoneFromFunctionArguments.cpp FunctionsLogical.cpp)
list(REMOVE_ITEM clickhouse_functions_headers IFunction.h FunctionFactory.h FunctionHelpers.h extractTimeZoneFromFunctionArguments.h FunctionsLogical.h)
add_library(clickhouse_functions ${clickhouse_functions_sources})
add_library(clickhouse_functions_obj OBJECT ${clickhouse_functions_sources})
target_link_libraries(clickhouse_functions
PUBLIC
list (APPEND OBJECT_LIBS $<TARGET_OBJECTS:clickhouse_functions_obj>)
list (APPEND PUBLIC_LIBS
ch_contrib::wyhash
ch_contrib::cityhash
ch_contrib::farmhash
@ -24,27 +25,28 @@ target_link_libraries(clickhouse_functions
ch_contrib::metrohash
ch_contrib::murmurhash
ch_contrib::hashidsxx
)
PRIVATE
list (APPEND PRIVATE_LIBS
ch_contrib::zlib
boost::filesystem
divide_impl
)
if (TARGET OpenSSL::Crypto)
target_link_libraries(clickhouse_functions PUBLIC OpenSSL::Crypto)
list (APPEND PUBLIC_LIBS OpenSSL::Crypto)
endif()
if (OMIT_HEAVY_DEBUG_SYMBOLS)
target_compile_options(clickhouse_functions PRIVATE "-g0")
target_compile_options(clickhouse_functions_obj PRIVATE "-g0")
endif()
if (TARGET ch_contrib::icu)
target_link_libraries (clickhouse_functions PRIVATE ch_contrib::icu)
list (APPEND PRIVATE_LIBS ch_contrib::icu)
endif ()
if (TARGET ch_contrib::fastops)
target_link_libraries (clickhouse_functions PRIVATE ch_contrib::fastops)
list (APPEND PRIVATE_LIBS ch_contrib::fastops)
endif ()
if (ENABLE_EXAMPLES)
@ -52,45 +54,46 @@ if (ENABLE_EXAMPLES)
endif ()
if (TARGET ch_contrib::llvm)
target_link_libraries(clickhouse_functions PRIVATE ch_contrib::llvm)
list (APPEND PRIVATE_LIBS ch_contrib::llvm)
endif ()
if (TARGET ch_contrib::base64)
target_link_libraries(clickhouse_functions PRIVATE ch_contrib::base64)
list (APPEND PRIVATE_LIBS ch_contrib::base64)
endif()
target_link_libraries(clickhouse_functions PRIVATE ch_contrib::lz4)
list (APPEND PRIVATE_LIBS ch_contrib::lz4)
if (ENABLE_NLP)
target_link_libraries(clickhouse_functions PRIVATE ch_contrib::cld2)
list (APPEND PRIVATE_LIBS ch_contrib::cld2)
endif()
if (TARGET ch_contrib::h3)
target_link_libraries (clickhouse_functions PRIVATE ch_contrib::h3)
list (APPEND PRIVATE_LIBS ch_contrib::h3)
endif()
if (TARGET ch_contrib::vectorscan)
target_link_libraries(clickhouse_functions PRIVATE ch_contrib::vectorscan)
list (APPEND PRIVATE_LIBS ch_contrib::vectorscan)
endif()
if (TARGET ch_contrib::simdjson)
target_link_libraries(clickhouse_functions PRIVATE ch_contrib::simdjson)
list (APPEND PRIVATE_LIBS ch_contrib::simdjson)
endif()
if (TARGET ch_contrib::rapidjson)
target_link_libraries(clickhouse_functions PRIVATE ch_contrib::rapidjson)
list (APPEND PRIVATE_LIBS ch_contrib::rapidjson)
endif()
add_subdirectory(GatherUtils)
target_link_libraries(clickhouse_functions PRIVATE clickhouse_functions_gatherutils)
list (APPEND PRIVATE_LIBS clickhouse_functions_gatherutils)
add_subdirectory(URL)
target_link_libraries(clickhouse_functions PRIVATE clickhouse_functions_url)
list (APPEND OBJECT_LIBS $<TARGET_OBJECTS:clickhouse_functions_url>)
add_subdirectory(array)
target_link_libraries(clickhouse_functions PRIVATE clickhouse_functions_array)
list (APPEND OBJECT_LIBS $<TARGET_OBJECTS:clickhouse_functions_array>)
add_subdirectory(JSONPath)
list (APPEND PRIVATE_LIBS clickhouse_functions_jsonpath)
# Signed integer overflow on user-provided data inside boost::geometry - ignore.
set_source_files_properties("pointInPolygon.cpp" PROPERTIES COMPILE_FLAGS -fno-sanitize=signed-integer-overflow)
@ -98,3 +101,15 @@ set_source_files_properties("pointInPolygon.cpp" PROPERTIES COMPILE_FLAGS -fno-s
if (ENABLE_FUZZING)
add_compile_definitions(FUZZING_MODE=1)
endif ()
target_link_libraries(clickhouse_functions_obj PUBLIC ${PUBLIC_LIBS} PRIVATE ${PRIVATE_LIBS})
if (USE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES)
# Used to forward the linking information to the final binaries such as clickhouse / unit_tests_dbms,
# since such information is lost after we convert to OBJECT target
add_library(clickhouse_functions INTERFACE)
target_link_libraries(clickhouse_functions INTERFACE ${OBJECT_LIBS} ${PUBLIC_LIBS} ${PRIVATE_LIBS})
else()
add_library(clickhouse_functions SHARED ${OBJECT_LIBS})
target_link_libraries(clickhouse_functions PUBLIC ${PUBLIC_LIBS} PRIVATE ${PRIVATE_LIBS})
endif ()

View File

@ -143,7 +143,7 @@ void registerFunctionCRCImpl(FunctionFactory & factory)
factory.registerFunction<T>(T::name, FunctionFactory::CaseInsensitive);
}
void registerFunctionCRC(FunctionFactory & factory)
REGISTER_FUNCTION(CRC)
{
registerFunctionCRCImpl<FunctionCRC32ZLIB>(factory);
registerFunctionCRCImpl<FunctionCRC32IEEE>(factory);

View File

@ -5,7 +5,7 @@
namespace DB
{
void registerCastOverloadResolvers(FunctionFactory & factory)
REGISTER_FUNCTION(CastOverloadResolvers)
{
factory.registerFunction<CastInternalOverloadResolver<CastType::nonAccurate>>(FunctionFactory::CaseInsensitive);
factory.registerFunction<CastInternalOverloadResolver<CastType::accurate>>();

View File

@ -113,7 +113,7 @@ private:
}
};
void registerFunctionChar(FunctionFactory & factory)
REGISTER_FUNCTION(Char)
{
factory.registerFunction<FunctionChar>(FunctionFactory::CaseInsensitive);
}

View File

@ -44,7 +44,7 @@ public:
};
void registerFunctionFQDN(FunctionFactory & factory)
REGISTER_FUNCTION(FQDN)
{
factory.registerFunction<FunctionFQDN>(FunctionFactory::CaseInsensitive);
factory.registerFunction<FunctionFQDN>("fullHostName");

View File

@ -1,6 +1,7 @@
#pragma once
#include <Interpreters/Context_fwd.h>
#include <Common/register_objects.h>
#include <Common/IFactoryWithAliases.h>
#include <Functions/IFunction.h>
#include <Functions/IFunctionAdaptors.h>

View File

@ -1,7 +1,10 @@
#include <Columns/ColumnNullable.h>
#include <Columns/ColumnString.h>
#include <Columns/ColumnConst.h>
#include <Columns/IColumn.h>
#include <Functions/FunctionFactory.h>
#include <DataTypes/DataTypeString.h>
#include <DataTypes/DataTypeNullable.h>
#include <IO/ReadBufferFromFile.h>
#include <IO/WriteBufferFromVector.h>
#include <IO/copyData.h>
@ -19,6 +22,7 @@ namespace ErrorCodes
{
extern const int ILLEGAL_COLUMN;
extern const int NOT_IMPLEMENTED;
extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH;
extern const int DATABASE_ACCESS_DENIED;
}
@ -30,21 +34,41 @@ public:
static FunctionPtr create(ContextPtr context_) { return std::make_shared<FunctionFile>(context_); }
explicit FunctionFile(ContextPtr context_) : WithContext(context_) {}
bool isVariadic() const override { return true; }
String getName() const override { return name; }
size_t getNumberOfArguments() const override { return 1; }
size_t getNumberOfArguments() const override { return 0; }
bool isSuitableForShortCircuitArgumentsExecution(const DataTypesWithConstInfo & /*arguments*/) const override { return true; }
DataTypePtr getReturnTypeImpl(const ColumnsWithTypeAndName & arguments) const override
{
if (arguments.empty() || arguments.size() > 2)
throw Exception(
ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH,
"Number of arguments for function {} doesn't match: passed {}, should be 1 or 2",
getName(), toString(arguments.size()));
if (!isString(arguments[0].type))
throw Exception(ErrorCodes::NOT_IMPLEMENTED, "{} is only implemented for type String", getName());
if (arguments.size() == 2)
{
if (arguments[1].type->onlyNull())
return makeNullable(std::make_shared<DataTypeString>());
if (!isString(arguments[1].type))
throw Exception(ErrorCodes::NOT_IMPLEMENTED, "{} only accepts String or Null as second argument", getName());
}
return std::make_shared<DataTypeString>();
}
ColumnNumbers getArgumentsThatAreAlwaysConstant() const override { return {1}; }
bool useDefaultImplementationForNulls() const override { return false; }
bool useDefaultImplementationForConstants() const override { return true; }
ColumnPtr executeImpl(const ColumnsWithTypeAndName & arguments, const DataTypePtr &, size_t input_rows_count) const override
ColumnPtr executeImpl(const ColumnsWithTypeAndName & arguments, const DataTypePtr & result_type, size_t input_rows_count) const override
{
const ColumnPtr column = arguments[0].column;
const ColumnString * column_src = checkAndGetColumn<ColumnString>(column.get());
@ -53,6 +77,31 @@ public:
fmt::format("Illegal column {} of argument of function {}", arguments[0].column->getName(), getName()),
ErrorCodes::ILLEGAL_COLUMN);
String default_result;
ColumnUInt8::MutablePtr col_null_map_to;
ColumnUInt8::Container * vec_null_map_to [[maybe_unused]] = nullptr;
if (arguments.size() == 2)
{
if (result_type->isNullable())
{
col_null_map_to = ColumnUInt8::create(input_rows_count, false);
vec_null_map_to = &col_null_map_to->getData();
}
else
{
const auto & default_column = arguments[1].column;
const ColumnConst * default_col = checkAndGetColumn<ColumnConst>(default_column.get());
if (!default_col)
throw Exception(
"Illegal column " + arguments[1].column->getName() + " of argument of function " + getName(), ErrorCodes::ILLEGAL_COLUMN);
default_result = default_col->getValue<String>();
}
}
auto result = ColumnString::create();
auto & res_chars = result->getChars();
auto & res_offsets = result->getOffsets();
@ -77,24 +126,40 @@ public:
/// Otherwise it will not allow to work with symlinks in `user_files_path` directory.
file_path = fs::absolute(file_path).lexically_normal();
if (need_check && file_path.string().find(user_files_absolute_path_string) != 0)
throw Exception(ErrorCodes::DATABASE_ACCESS_DENIED, "File is not inside {}", user_files_absolute_path.string());
try
{
if (need_check && file_path.string().find(user_files_absolute_path_string) != 0)
throw Exception(ErrorCodes::DATABASE_ACCESS_DENIED, "File is not inside {}", user_files_absolute_path.string());
ReadBufferFromFile in(file_path);
WriteBufferFromVector out(res_chars, AppendModeTag{});
copyData(in, out);
out.finalize();
ReadBufferFromFile in(file_path);
WriteBufferFromVector out(res_chars, AppendModeTag{});
copyData(in, out);
out.finalize();
}
catch (...)
{
if (arguments.size() == 1)
throw;
if (vec_null_map_to)
(*vec_null_map_to)[row] = true;
else
res_chars.insert(default_result.data(), default_result.data() + default_result.size());
}
res_chars.push_back(0);
res_offsets[row] = res_chars.size();
}
if (vec_null_map_to)
return ColumnNullable::create(std::move(result), std::move(col_null_map_to));
return result;
}
};
void registerFunctionFile(FunctionFactory & factory)
REGISTER_FUNCTION(File)
{
factory.registerFunction<FunctionFile>();
}

View File

@ -4,7 +4,7 @@
namespace DB
{
void registerFunctionHashID(FunctionFactory & factory)
REGISTER_FUNCTION(HashID)
{
factory.registerFunction<FunctionHashID>();
}

View File

@ -96,7 +96,7 @@ FunctionBasePtr JoinGetOverloadResolver<or_null>::buildImpl(const ColumnsWithTyp
return std::make_unique<FunctionJoinGet<or_null>>(getContext(), table_lock, storage_join, attr_name, argument_types, return_type);
}
void registerFunctionJoinGet(FunctionFactory & factory)
REGISTER_FUNCTION(JoinGet)
{
// joinGet
factory.registerFunction<JoinGetOverloadResolver<false>>();

View File

@ -5,7 +5,7 @@
namespace DB
{
void registerFunctionsSQLJSON(FunctionFactory & factory)
REGISTER_FUNCTION(SQLJSON)
{
factory.registerFunction<FunctionSQLJSON<NameJSONExists, JSONExistsImpl>>();
factory.registerFunction<FunctionSQLJSON<NameJSONQuery, JSONQueryImpl>>();

View File

@ -4,7 +4,7 @@
namespace DB
{
void registerFunctionShowCertificate(FunctionFactory & factory)
REGISTER_FUNCTION(ShowCertificate)
{
factory.registerFunction<FunctionShowCertificate>();
}

View File

@ -3,12 +3,12 @@
namespace DB
{
void registerFunctionBase58Encode(FunctionFactory & factory)
REGISTER_FUNCTION(Base58Encode)
{
factory.registerFunction<FunctionBase58Conversion<Base58Encode>>();
}
void registerFunctionBase58Decode(FunctionFactory & factory)
REGISTER_FUNCTION(Base58Decode)
{
factory.registerFunction<FunctionBase58Conversion<Base58Decode>>();
}

View File

@ -621,7 +621,7 @@ public:
}
};
void registerFunctionsBinaryRepr(FunctionFactory & factory)
REGISTER_FUNCTION(BinaryRepr)
{
factory.registerFunction<EncodeToBinaryRepresentation<HexImpl>>(FunctionFactory::CaseInsensitive);
factory.registerFunction<DecodeFromBinaryRepresentation<UnhexImpl>>(FunctionFactory::CaseInsensitive);

View File

@ -8,6 +8,7 @@
#include <Functions/IFunction.h>
#include <IO/WriteBufferFromVector.h>
#include <IO/WriteHelpers.h>
#include <bit>
namespace DB
@ -285,7 +286,7 @@ public:
{
while (x)
{
result_array_values_data.push_back(getTrailingZeroBitsUnsafe(x));
result_array_values_data.push_back(std::countr_zero(x));
x &= (x - 1);
}
}
@ -329,7 +330,7 @@ public:
}
void registerFunctionsBitToArray(FunctionFactory & factory)
REGISTER_FUNCTION(BitToArray)
{
factory.registerFunction<FunctionBitPositionsToArray>();
factory.registerFunction<FunctionBitmaskToArray>();
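The bitPositionsToArray loop earlier in this file (push std::countr_zero(x), then clear the lowest set bit with x &= (x - 1)) enumerates the set-bit positions in ascending order; a small worked sketch:

#include <bit>
#include <cstdint>
#include <vector>
#include <cassert>

int main()
{
    uint64_t x = 0b10010;                 // bits 1 and 4 are set
    std::vector<unsigned> positions;
    while (x)
    {
        positions.push_back(std::countr_zero(x));   // index of the lowest set bit
        x &= (x - 1);                               // clear that bit
    }
    assert((positions == std::vector<unsigned>{1, 4}));
}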

View File

@ -7,7 +7,7 @@
namespace DB
{
void registerFunctionsBitmap(FunctionFactory & factory)
REGISTER_FUNCTION(Bitmap)
{
factory.registerFunction<FunctionBitmapBuild>();
factory.registerFunction<FunctionBitmapToArray>();

View File

@ -143,7 +143,7 @@ struct NameDetectLanguageUnknown
using FunctionDetectCharset = FunctionTextClassificationString<CharsetClassificationImpl<false>, NameDetectCharset>;
using FunctionDetectLanguageUnknown = FunctionTextClassificationString<CharsetClassificationImpl<true>, NameDetectLanguageUnknown>;
void registerFunctionDetectCharset(FunctionFactory & factory)
REGISTER_FUNCTION(DetectCharset)
{
factory.registerFunction<FunctionDetectCharset>();
factory.registerFunction<FunctionDetectLanguageUnknown>();

View File

@ -1128,7 +1128,7 @@ public:
struct NameFunctionIPv4NumToString { static constexpr auto name = "IPv4NumToString"; };
struct NameFunctionIPv4NumToStringClassC { static constexpr auto name = "IPv4NumToStringClassC"; };
void registerFunctionsCoding(FunctionFactory & factory)
REGISTER_FUNCTION(Coding)
{
factory.registerFunction<FunctionCutIPv6>();
factory.registerFunction<FunctionIPv4ToIPv6>();

View File

@ -229,7 +229,7 @@ public:
}
};
void registerFunctionsCodingUUID(FunctionFactory & factory)
REGISTER_FUNCTION(CodingUUID)
{
factory.registerFunction<FunctionUUIDNumToString>();
factory.registerFunction<FunctionUUIDStringToNum>();

View File

@ -5,11 +5,7 @@
namespace DB
{
void registerFunctionFixedString(FunctionFactory & factory);
void registerCastOverloadResolvers(FunctionFactory & factory);
void registerFunctionsConversion(FunctionFactory & factory)
REGISTER_FUNCTION(Conversion)
{
factory.registerFunction<FunctionToUInt8>();
factory.registerFunction<FunctionToUInt16>();
@ -41,12 +37,8 @@ void registerFunctionsConversion(FunctionFactory & factory)
factory.registerFunction<FunctionToUUID>();
factory.registerFunction<FunctionToString>();
registerFunctionFixedString(factory);
factory.registerFunction<FunctionToUnixTimestamp>();
registerCastOverloadResolvers(factory);
factory.registerFunction<FunctionToUInt8OrZero>();
factory.registerFunction<FunctionToUInt16OrZero>();
factory.registerFunction<FunctionToUInt32OrZero>();

View File

@ -5,7 +5,7 @@
namespace DB
{
void registerFunctionsEmbeddedDictionaries(FunctionFactory & factory)
REGISTER_FUNCTION(EmbeddedDictionaries)
{
factory.registerFunction<FunctionRegionToCity>();
factory.registerFunction<FunctionRegionToArea>();

View File

@ -5,7 +5,7 @@
namespace DB
{
void registerFunctionsExternalDictionaries(FunctionFactory & factory)
REGISTER_FUNCTION(ExternalDictionaries)
{
factory.registerFunction<FunctionDictHas>();
factory.registerFunction<FunctionDictGetUInt8>();

View File

@ -6,7 +6,7 @@
namespace DB
{
void registerFunctionsHashing(FunctionFactory & factory)
REGISTER_FUNCTION(Hashing)
{
#if USE_SSL
factory.registerFunction<FunctionMD4>();

View File

@ -1443,7 +1443,7 @@ public:
}
};
void registerFunctionsJSON(FunctionFactory & factory)
REGISTER_FUNCTION(JSON)
{
factory.registerFunction<JSONOverloadResolver<NameJSONHas, JSONHasImpl>>();
factory.registerFunction<JSONOverloadResolver<NameIsValidJSON, IsValidJSONImpl>>();

View File

@ -221,7 +221,7 @@ struct NameDetectLanguage
using FunctionDetectLanguage = FunctionTextClassificationString<FunctionDetectLanguageImpl, NameDetectLanguage>;
void registerFunctionsDetectLanguage(FunctionFactory & factory)
REGISTER_FUNCTION(DetectLanguage)
{
factory.registerFunction<FunctionDetectLanguage>();
factory.registerFunction<FunctionDetectLanguageMixed>();

View File

@ -22,7 +22,7 @@
namespace DB
{
void registerFunctionsLogical(FunctionFactory & factory)
REGISTER_FUNCTION(Logical)
{
factory.registerFunction<FunctionAnd>();
factory.registerFunction<FunctionOr>();

View File

@ -112,7 +112,7 @@ struct NameDetectProgrammingLanguage
using FunctionDetectProgrammingLanguage = FunctionTextClassificationString<FunctionDetectProgrammingLanguageImpl, NameDetectProgrammingLanguage>;
void registerFunctionDetectProgrammingLanguage(FunctionFactory & factory)
REGISTER_FUNCTION(DetectProgrammingLanguage)
{
factory.registerFunction<FunctionDetectProgrammingLanguage>();
}

View File

@ -5,7 +5,7 @@
namespace DB
{
void registerFunctionsRound(FunctionFactory & factory)
REGISTER_FUNCTION(Round)
{
factory.registerFunction<FunctionRound>("round", FunctionFactory::CaseInsensitive);
factory.registerFunction<FunctionRoundBankers>("roundBankers", FunctionFactory::CaseSensitive);

View File

@ -27,7 +27,7 @@ DataTypePtr FunctionArrayStringConcat::getReturnTypeImpl(const DataTypes & argum
return std::make_shared<DataTypeString>();
}
void registerFunctionsStringArray(FunctionFactory & factory)
REGISTER_FUNCTION(StringArray)
{
factory.registerFunction<FunctionExtractAll>();
factory.registerFunction<FunctionAlphaTokens>();

View File

@ -743,7 +743,7 @@ using FunctionWordShingleMinHashArgUTF8
using FunctionWordShingleMinHashArgCaseInsensitiveUTF8
= FunctionsStringHash<MinHashImpl<true, false, true>, NameWordShingleMinHashArgCaseInsensitiveUTF8, false, true>;
void registerFunctionsStringHash(FunctionFactory & factory)
REGISTER_FUNCTION(StringHash)
{
factory.registerFunction<FunctionNgramSimHash>();
factory.registerFunction<FunctionNgramSimHashCaseInsensitive>();

View File

@ -530,7 +530,7 @@ using FunctionNgramSearchUTF8 = FunctionsStringSimilarity<NgramDistanceImpl<3, U
using FunctionNgramSearchCaseInsensitiveUTF8 = FunctionsStringSimilarity<NgramDistanceImpl<3, UInt32, true, true, false>, NameNgramSearchUTF8CaseInsensitive>;
void registerFunctionsStringSimilarity(FunctionFactory & factory)
REGISTER_FUNCTION(StringSimilarity)
{
factory.registerFunction<FunctionNgramDistance>();
factory.registerFunction<FunctionNgramDistanceCaseInsensitive>();

View File

@ -684,7 +684,7 @@ ColumnPtr FunctionTimeWindow<type>::executeImpl(const ColumnsWithTypeAndName & a
return TimeWindowImpl<type>::dispatchForColumns(arguments, name);
}
void registerFunctionsTimeWindow(FunctionFactory& factory)
REGISTER_FUNCTION(TimeWindow)
{
factory.registerFunction<FunctionTumble>();
factory.registerFunction<FunctionHop>();

View File

@ -81,7 +81,7 @@ struct NameDetectTonality
using FunctionDetectTonality = FunctionTextClassificationFloat<FunctionDetectTonalityImpl, NameDetectTonality>;
void registerFunctionDetectTonality(FunctionFactory & factory)
REGISTER_FUNCTION(DetectTonality)
{
factory.registerFunction<FunctionDetectTonality>();
}

View File

@ -61,7 +61,7 @@ public:
}
void registerFunctionsTransactionCounters(FunctionFactory & factory)
REGISTER_FUNCTION(TransactionCounters)
{
factory.registerFunction<FunctionTransactionID>();
factory.registerFunction<FunctionTransactionLatestSnapshot>();

View File

@ -6,7 +6,6 @@ add_library(clickhouse_functions_jsonpath ${clickhouse_functions_jsonpath_source
target_link_libraries(clickhouse_functions_jsonpath PRIVATE dbms)
target_link_libraries(clickhouse_functions_jsonpath PRIVATE clickhouse_parsers)
target_link_libraries(clickhouse_functions PRIVATE clickhouse_functions_jsonpath)
if (OMIT_HEAVY_DEBUG_SYMBOLS)
target_compile_options(clickhouse_functions_jsonpath PRIVATE "-g0")

View File

@ -6,19 +6,19 @@ namespace DB
{
using FunctionSubtractNanoseconds = FunctionDateOrDateTimeAddInterval<SubtractNanosecondsImpl>;
void registerFunctionSubtractNanoseconds(FunctionFactory & factory)
REGISTER_FUNCTION(SubtractNanoseconds)
{
factory.registerFunction<FunctionSubtractNanoseconds>();
}
using FunctionSubtractMicroseconds = FunctionDateOrDateTimeAddInterval<SubtractMicrosecondsImpl>;
void registerFunctionSubtractMicroseconds(FunctionFactory & factory)
REGISTER_FUNCTION(SubtractMicroseconds)
{
factory.registerFunction<FunctionSubtractMicroseconds>();
}
using FunctionSubtractMilliseconds = FunctionDateOrDateTimeAddInterval<SubtractMillisecondsImpl>;
void registerFunctionSubtractMilliseconds(FunctionFactory & factory)
REGISTER_FUNCTION(SubtractMilliseconds)
{
factory.registerFunction<FunctionSubtractMilliseconds>();
}

View File

@ -1,6 +1,6 @@
include("${ClickHouse_SOURCE_DIR}/cmake/dbms_glob_sources.cmake")
add_headers_and_sources(clickhouse_functions_url .)
add_library(clickhouse_functions_url ${clickhouse_functions_url_sources} ${clickhouse_functions_url_headers})
add_library(clickhouse_functions_url OBJECT ${clickhouse_functions_url_sources} ${clickhouse_functions_url_headers})
target_link_libraries(clickhouse_functions_url PRIVATE dbms)
if (OMIT_HEAVY_DEBUG_SYMBOLS)
@ -10,6 +10,7 @@ endif()
# TODO: move Functions/Regexps.h to some lib and use here
if (TARGET ch_contrib::vectorscan)
target_link_libraries(clickhouse_functions_url PRIVATE ch_contrib::vectorscan)
list (APPEND PRIVATE_LIBS ch_contrib::vectorscan PARENT_SCOPE)
endif()
if (USE_GPERF)

View File

@ -104,7 +104,7 @@ public:
struct NameURLPathHierarchy { static constexpr auto name = "URLPathHierarchy"; };
using FunctionURLPathHierarchy = FunctionTokens<URLPathHierarchyImpl>;
void registerFunctionURLPathHierarchy(FunctionFactory & factory)
REGISTER_FUNCTION(URLPathHierarchy)
{
factory.registerFunction<FunctionURLPathHierarchy>();
}

View File

@ -106,7 +106,7 @@ public:
struct NameURLHierarchy { static constexpr auto name = "URLHierarchy"; };
using FunctionURLHierarchy = FunctionTokens<URLHierarchyImpl>;
void registerFunctionURLHierarchy(FunctionFactory & factory)
REGISTER_FUNCTION(URLHierarchy)
{
factory.registerFunction<FunctionURLHierarchy>();
}

View File

@ -34,7 +34,7 @@ struct ExtractBasename
struct NameBasename { static constexpr auto name = "basename"; };
using FunctionBasename = FunctionStringToString<ExtractSubstringImpl<ExtractBasename>, NameBasename>;
void registerFunctionBasename(FunctionFactory & factory)
REGISTER_FUNCTION(Basename)
{
factory.registerFunction<FunctionBasename>();
}

View File

@ -8,7 +8,7 @@ namespace DB
struct NameCutFragment { static constexpr auto name = "cutFragment"; };
using FunctionCutFragment = FunctionStringToString<CutSubstringImpl<ExtractFragment<false>>, NameCutFragment>;
void registerFunctionCutFragment(FunctionFactory & factory)
REGISTER_FUNCTION(CutFragment)
{
factory.registerFunction<FunctionCutFragment>();
}

View File

@ -8,7 +8,7 @@ namespace DB
struct NameCutQueryString { static constexpr auto name = "cutQueryString"; };
using FunctionCutQueryString = FunctionStringToString<CutSubstringImpl<ExtractQueryString<false>>, NameCutQueryString>;
void registerFunctionCutQueryString(FunctionFactory & factory)
REGISTER_FUNCTION(CutQueryString)
{
factory.registerFunction<FunctionCutQueryString>();
}

View File

@ -8,7 +8,7 @@ namespace DB
struct NameCutQueryStringAndFragment { static constexpr auto name = "cutQueryStringAndFragment"; };
using FunctionCutQueryStringAndFragment = FunctionStringToString<CutSubstringImpl<ExtractQueryStringAndFragment<false>>, NameCutQueryStringAndFragment>;
void registerFunctionCutQueryStringAndFragment(FunctionFactory & factory)
REGISTER_FUNCTION(CutQueryStringAndFragment)
{
factory.registerFunction<FunctionCutQueryStringAndFragment>();
}

View File

@ -35,7 +35,7 @@ using FunctionCutToFirstSignificantSubdomain = FunctionStringToString<ExtractSub
struct NameCutToFirstSignificantSubdomainWithWWW { static constexpr auto name = "cutToFirstSignificantSubdomainWithWWW"; };
using FunctionCutToFirstSignificantSubdomainWithWWW = FunctionStringToString<ExtractSubstringImpl<CutToFirstSignificantSubdomain<false>>, NameCutToFirstSignificantSubdomainWithWWW>;
void registerFunctionCutToFirstSignificantSubdomain(FunctionFactory & factory)
REGISTER_FUNCTION(CutToFirstSignificantSubdomain)
{
factory.registerFunction<FunctionCutToFirstSignificantSubdomain>();
factory.registerFunction<FunctionCutToFirstSignificantSubdomainWithWWW>();

View File

@ -34,7 +34,7 @@ using FunctionCutToFirstSignificantSubdomainCustom = FunctionCutToFirstSignifica
struct NameCutToFirstSignificantSubdomainCustomWithWWW { static constexpr auto name = "cutToFirstSignificantSubdomainCustomWithWWW"; };
using FunctionCutToFirstSignificantSubdomainCustomWithWWW = FunctionCutToFirstSignificantSubdomainCustomImpl<CutToFirstSignificantSubdomainCustom<false>, NameCutToFirstSignificantSubdomainCustomWithWWW>;
void registerFunctionCutToFirstSignificantSubdomainCustom(FunctionFactory & factory)
REGISTER_FUNCTION(CutToFirstSignificantSubdomainCustom)
{
factory.registerFunction<FunctionCutToFirstSignificantSubdomainCustom>();
factory.registerFunction<FunctionCutToFirstSignificantSubdomainCustomWithWWW>();

View File

@ -77,7 +77,7 @@ struct CutURLParameterImpl
struct NameCutURLParameter { static constexpr auto name = "cutURLParameter"; };
using FunctionCutURLParameter = FunctionsStringSearchToString<CutURLParameterImpl, NameCutURLParameter>;
void registerFunctionCutURLParameter(FunctionFactory & factory)
REGISTER_FUNCTION(CutURLParameter)
{
factory.registerFunction<FunctionCutURLParameter>();
}

View File

@ -54,7 +54,7 @@ struct ExtractWWW
struct NameCutWWW { static constexpr auto name = "cutWWW"; };
using FunctionCutWWW = FunctionStringToString<CutSubstringImpl<ExtractWWW>, NameCutWWW>;
void registerFunctionCutWWW(FunctionFactory & factory)
REGISTER_FUNCTION(CutWWW)
{
factory.registerFunction<FunctionCutWWW>();
}

View File

@ -172,7 +172,7 @@ using FunctionEncodeURLComponent = FunctionStringToString<CodeURLComponentImpl<e
using FunctionDecodeURLFormComponent = FunctionStringToString<CodeURLComponentImpl<decode, true>, NameDecodeURLFormComponent>;
using FunctionEncodeURLFormComponent = FunctionStringToString<CodeURLComponentImpl<encode, true>, NameEncodeURLFormComponent>;
void registerFunctionEncodeAndDecodeURLComponent(FunctionFactory & factory)
REGISTER_FUNCTION(EncodeAndDecodeURLComponent)
{
factory.registerFunction<FunctionDecodeURLComponent>();
factory.registerFunction<FunctionEncodeURLComponent>();

View File

@ -10,7 +10,7 @@ struct NameDomain { static constexpr auto name = "domain"; };
using FunctionDomain = FunctionStringToString<ExtractSubstringImpl<ExtractDomain<false>>, NameDomain>;
void registerFunctionDomain(FunctionFactory & factory)
REGISTER_FUNCTION(Domain)
{
factory.registerFunction<FunctionDomain>();
}

View File

@ -9,7 +9,7 @@ struct NameDomainWithoutWWW { static constexpr auto name = "domainWithoutWWW"; }
using FunctionDomainWithoutWWW = FunctionStringToString<ExtractSubstringImpl<ExtractDomain<true>>, NameDomainWithoutWWW>;
void registerFunctionDomainWithoutWWW(FunctionFactory & factory)
REGISTER_FUNCTION(DomainWithoutWWW)
{
factory.registerFunction<FunctionDomainWithoutWWW>();
}

View File

@ -92,7 +92,7 @@ struct ExtractURLParameterImpl
struct NameExtractURLParameter { static constexpr auto name = "extractURLParameter"; };
using FunctionExtractURLParameter = FunctionsStringSearchToString<ExtractURLParameterImpl, NameExtractURLParameter>;
void registerFunctionExtractURLParameter(FunctionFactory & factory)
REGISTER_FUNCTION(ExtractURLParameter)
{
factory.registerFunction<FunctionExtractURLParameter>();
}

View File

@ -89,7 +89,7 @@ public:
struct NameExtractURLParameterNames { static constexpr auto name = "extractURLParameterNames"; };
using FunctionExtractURLParameterNames = FunctionTokens<ExtractURLParameterNamesImpl>;
void registerFunctionExtractURLParameterNames(FunctionFactory & factory)
REGISTER_FUNCTION(ExtractURLParameterNames)
{
factory.registerFunction<FunctionExtractURLParameterNames>();
}

Some files were not shown because too many files have changed in this diff.