mirror of
https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-10 01:25:21 +00:00
Merge branch 'master' into rdb_auto_recovery
This commit is contained in:
commit
9eaf9004b7
16
.clang-tidy
16
.clang-tidy
@ -123,7 +123,21 @@ Checks: [
|
|||||||
'-readability-uppercase-literal-suffix',
|
'-readability-uppercase-literal-suffix',
|
||||||
'-readability-use-anyofallof',
|
'-readability-use-anyofallof',
|
||||||
|
|
||||||
'-zircon-*'
|
'-zircon-*',
|
||||||
|
|
||||||
|
# These are new in clang-18, and we have to sort them out:
|
||||||
|
'-readability-avoid-nested-conditional-operator',
|
||||||
|
'-modernize-use-designated-initializers',
|
||||||
|
'-performance-enum-size',
|
||||||
|
'-readability-redundant-inline-specifier',
|
||||||
|
'-readability-redundant-member-init',
|
||||||
|
'-bugprone-crtp-constructor-accessibility',
|
||||||
|
'-bugprone-suspicious-stringview-data-usage',
|
||||||
|
'-bugprone-multi-level-implicit-pointer-conversion',
|
||||||
|
'-cert-err33-c',
|
||||||
|
|
||||||
|
# This is a good check, but clang-tidy crashes, see https://github.com/llvm/llvm-project/issues/91872
|
||||||
|
'-modernize-use-constraints',
|
||||||
]
|
]
|
||||||
|
|
||||||
WarningsAsErrors: '*'
|
WarningsAsErrors: '*'
|
||||||
|
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
@ -85,4 +85,4 @@ At a minimum, the following information should be added (but add more as needed)
|
|||||||
- [ ] <!---batch_2--> 3
|
- [ ] <!---batch_2--> 3
|
||||||
- [ ] <!---batch_3--> 4
|
- [ ] <!---batch_3--> 4
|
||||||
|
|
||||||
<details>
|
</details>
|
||||||
|
6
.github/workflows/backport_branches.yml
vendored
6
.github/workflows/backport_branches.yml
vendored
@ -9,6 +9,12 @@ on: # yamllint disable-line rule:truthy
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- 'backport/**'
|
- 'backport/**'
|
||||||
|
|
||||||
|
# Cancel the previous wf run in PRs.
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
RunConfig:
|
RunConfig:
|
||||||
runs-on: [self-hosted, style-checker-aarch64]
|
runs-on: [self-hosted, style-checker-aarch64]
|
||||||
|
19
.github/workflows/cancel.yml
vendored
19
.github/workflows/cancel.yml
vendored
@ -1,19 +0,0 @@
|
|||||||
name: Cancel
|
|
||||||
|
|
||||||
env:
|
|
||||||
# Force the stdout and stderr streams to be unbuffered
|
|
||||||
PYTHONUNBUFFERED: 1
|
|
||||||
|
|
||||||
on: # yamllint disable-line rule:truthy
|
|
||||||
workflow_run:
|
|
||||||
workflows: ["PullRequestCI", "ReleaseBranchCI", "DocsCheck", "BackportPR"]
|
|
||||||
types:
|
|
||||||
- requested
|
|
||||||
jobs:
|
|
||||||
cancel:
|
|
||||||
runs-on: [self-hosted, style-checker]
|
|
||||||
steps:
|
|
||||||
- uses: styfle/cancel-workflow-action@0.9.1
|
|
||||||
with:
|
|
||||||
all_but_latest: true
|
|
||||||
workflow_id: ${{ github.event.workflow.id }}
|
|
11
.github/workflows/debug.yml
vendored
11
.github/workflows/debug.yml
vendored
@ -1,11 +0,0 @@
|
|||||||
# The CI for each commit, prints envs and content of GITHUB_EVENT_PATH
|
|
||||||
name: Debug
|
|
||||||
|
|
||||||
'on':
|
|
||||||
[push, pull_request, pull_request_review, release, workflow_dispatch, workflow_call]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
DebugInfo:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
|
|
96
.github/workflows/merge_queue.yml
vendored
Normal file
96
.github/workflows/merge_queue.yml
vendored
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
# yamllint disable rule:comments-indentation
|
||||||
|
name: MergeQueueCI
|
||||||
|
|
||||||
|
env:
|
||||||
|
# Force the stdout and stderr streams to be unbuffered
|
||||||
|
PYTHONUNBUFFERED: 1
|
||||||
|
|
||||||
|
on: # yamllint disable-line rule:truthy
|
||||||
|
merge_group:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
RunConfig:
|
||||||
|
runs-on: [self-hosted, style-checker-aarch64]
|
||||||
|
outputs:
|
||||||
|
data: ${{ steps.runconfig.outputs.CI_DATA }}
|
||||||
|
steps:
|
||||||
|
- name: DebugInfo
|
||||||
|
uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
|
||||||
|
- name: Check out repository code
|
||||||
|
uses: ClickHouse/checkout@v1
|
||||||
|
with:
|
||||||
|
clear-repository: true # to ensure correct digests
|
||||||
|
fetch-depth: 0 # to get version
|
||||||
|
filter: tree:0
|
||||||
|
- name: Python unit tests
|
||||||
|
run: |
|
||||||
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
|
echo "Testing the main ci directory"
|
||||||
|
python3 -m unittest discover -s . -p 'test_*.py'
|
||||||
|
for dir in *_lambda/; do
|
||||||
|
echo "Testing $dir"
|
||||||
|
python3 -m unittest discover -s "$dir" -p 'test_*.py'
|
||||||
|
done
|
||||||
|
- name: PrepareRunConfig
|
||||||
|
id: runconfig
|
||||||
|
run: |
|
||||||
|
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --outfile ${{ runner.temp }}/ci_run_data.json
|
||||||
|
|
||||||
|
echo "::group::CI configuration"
|
||||||
|
python3 -m json.tool ${{ runner.temp }}/ci_run_data.json
|
||||||
|
echo "::endgroup::"
|
||||||
|
|
||||||
|
{
|
||||||
|
echo 'CI_DATA<<EOF'
|
||||||
|
cat ${{ runner.temp }}/ci_run_data.json
|
||||||
|
echo 'EOF'
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
|
BuildDockers:
|
||||||
|
needs: [RunConfig]
|
||||||
|
if: ${{ !failure() && !cancelled() && toJson(fromJson(needs.RunConfig.outputs.data).docker_data.missing_multi) != '[]' }}
|
||||||
|
uses: ./.github/workflows/reusable_docker.yml
|
||||||
|
with:
|
||||||
|
data: ${{ needs.RunConfig.outputs.data }}
|
||||||
|
StyleCheck:
|
||||||
|
needs: [RunConfig, BuildDockers]
|
||||||
|
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Style check')}}
|
||||||
|
uses: ./.github/workflows/reusable_test.yml
|
||||||
|
with:
|
||||||
|
test_name: Style check
|
||||||
|
runner_type: style-checker
|
||||||
|
run_command: |
|
||||||
|
python3 style_check.py
|
||||||
|
data: ${{ needs.RunConfig.outputs.data }}
|
||||||
|
secrets:
|
||||||
|
secret_envs: |
|
||||||
|
ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
|
||||||
|
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
|
||||||
|
RCSK
|
||||||
|
FastTest:
|
||||||
|
needs: [RunConfig, BuildDockers]
|
||||||
|
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Fast test') }}
|
||||||
|
uses: ./.github/workflows/reusable_test.yml
|
||||||
|
with:
|
||||||
|
test_name: Fast test
|
||||||
|
runner_type: builder
|
||||||
|
data: ${{ needs.RunConfig.outputs.data }}
|
||||||
|
run_command: |
|
||||||
|
python3 fast_test_check.py
|
||||||
|
|
||||||
|
################################# Stage Final #################################
|
||||||
|
#
|
||||||
|
FinishCheck:
|
||||||
|
if: ${{ !failure() && !cancelled() }}
|
||||||
|
needs: [RunConfig, BuildDockers, StyleCheck, FastTest]
|
||||||
|
runs-on: [self-hosted, style-checker]
|
||||||
|
steps:
|
||||||
|
- name: Check out repository code
|
||||||
|
uses: ClickHouse/checkout@v1
|
||||||
|
- name: Check sync status
|
||||||
|
run: |
|
||||||
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
|
python3 sync_pr.py --status
|
||||||
|
- name: Finish label
|
||||||
|
run: |
|
||||||
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
|
python3 finish_check.py ${{ (contains(needs.*.result, 'failure') && github.event_name == 'merge_group') && '--pipeline-failure' || '' }}
|
5
.github/workflows/nightly.yml
vendored
5
.github/workflows/nightly.yml
vendored
@ -10,14 +10,13 @@ env:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
Debug:
|
|
||||||
# The task for having a preserved ENV and event.json for later investigation
|
|
||||||
uses: ./.github/workflows/debug.yml
|
|
||||||
RunConfig:
|
RunConfig:
|
||||||
runs-on: [self-hosted, style-checker-aarch64]
|
runs-on: [self-hosted, style-checker-aarch64]
|
||||||
outputs:
|
outputs:
|
||||||
data: ${{ steps.runconfig.outputs.CI_DATA }}
|
data: ${{ steps.runconfig.outputs.CI_DATA }}
|
||||||
steps:
|
steps:
|
||||||
|
- name: DebugInfo
|
||||||
|
uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
|
||||||
- name: Check out repository code
|
- name: Check out repository code
|
||||||
uses: ClickHouse/checkout@v1
|
uses: ClickHouse/checkout@v1
|
||||||
with:
|
with:
|
||||||
|
28
.github/workflows/pull_request.yml
vendored
28
.github/workflows/pull_request.yml
vendored
@ -6,7 +6,6 @@ env:
|
|||||||
PYTHONUNBUFFERED: 1
|
PYTHONUNBUFFERED: 1
|
||||||
|
|
||||||
on: # yamllint disable-line rule:truthy
|
on: # yamllint disable-line rule:truthy
|
||||||
merge_group:
|
|
||||||
pull_request:
|
pull_request:
|
||||||
types:
|
types:
|
||||||
- synchronize
|
- synchronize
|
||||||
@ -15,6 +14,11 @@ on: # yamllint disable-line rule:truthy
|
|||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
|
|
||||||
|
# Cancel the previous wf run in PRs.
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
RunConfig:
|
RunConfig:
|
||||||
runs-on: [self-hosted, style-checker-aarch64]
|
runs-on: [self-hosted, style-checker-aarch64]
|
||||||
@ -30,7 +34,6 @@ jobs:
|
|||||||
fetch-depth: 0 # to get version
|
fetch-depth: 0 # to get version
|
||||||
filter: tree:0
|
filter: tree:0
|
||||||
- name: Labels check
|
- name: Labels check
|
||||||
if: ${{ github.event_name != 'merge_group' }}
|
|
||||||
run: |
|
run: |
|
||||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
python3 run_check.py
|
python3 run_check.py
|
||||||
@ -58,7 +61,6 @@ jobs:
|
|||||||
echo 'EOF'
|
echo 'EOF'
|
||||||
} >> "$GITHUB_OUTPUT"
|
} >> "$GITHUB_OUTPUT"
|
||||||
- name: Re-create GH statuses for skipped jobs if any
|
- name: Re-create GH statuses for skipped jobs if any
|
||||||
if: ${{ github.event_name != 'merge_group' }}
|
|
||||||
run: |
|
run: |
|
||||||
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ runner.temp }}/ci_run_data.json --update-gh-statuses
|
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ runner.temp }}/ci_run_data.json --update-gh-statuses
|
||||||
BuildDockers:
|
BuildDockers:
|
||||||
@ -83,7 +85,7 @@ jobs:
|
|||||||
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
|
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
|
||||||
RCSK
|
RCSK
|
||||||
FastTest:
|
FastTest:
|
||||||
needs: [RunConfig, BuildDockers]
|
needs: [RunConfig, BuildDockers, StyleCheck]
|
||||||
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Fast test') }}
|
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Fast test') }}
|
||||||
uses: ./.github/workflows/reusable_test.yml
|
uses: ./.github/workflows/reusable_test.yml
|
||||||
with:
|
with:
|
||||||
@ -163,20 +165,16 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Check out repository code
|
- name: Check out repository code
|
||||||
uses: ClickHouse/checkout@v1
|
uses: ClickHouse/checkout@v1
|
||||||
- name: Check sync status
|
|
||||||
if: ${{ github.event_name == 'merge_group' }}
|
|
||||||
run: |
|
|
||||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
|
||||||
python3 sync_pr.py --status
|
|
||||||
- name: Finish label
|
- name: Finish label
|
||||||
run: |
|
run: |
|
||||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
python3 finish_check.py ${{ (contains(needs.*.result, 'failure') && github.event_name == 'merge_group') && '--pipeline-failure' || '' }}
|
python3 finish_check.py
|
||||||
- name: Auto merge if approved
|
# FIXME: merge on approval does not work with MQ. Could be fixed by using defaul GH's automerge after some corrections in Mergeable Check status
|
||||||
if: ${{ github.event_name != 'merge_group' }}
|
# - name: Auto merge if approved
|
||||||
run: |
|
# if: ${{ github.event_name != 'merge_group' }}
|
||||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
# run: |
|
||||||
python3 merge_pr.py --check-approved
|
# cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
|
# python3 merge_pr.py --check-approved
|
||||||
|
|
||||||
|
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
|
23
.github/workflows/pull_request_approved.yml
vendored
23
.github/workflows/pull_request_approved.yml
vendored
@ -1,23 +0,0 @@
|
|||||||
name: PullRequestApprovedCI
|
|
||||||
|
|
||||||
env:
|
|
||||||
# Force the stdout and stderr streams to be unbuffered
|
|
||||||
PYTHONUNBUFFERED: 1
|
|
||||||
|
|
||||||
on: # yamllint disable-line rule:truthy
|
|
||||||
pull_request_review:
|
|
||||||
types:
|
|
||||||
- submitted
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
MergeOnApproval:
|
|
||||||
runs-on: [self-hosted, style-checker]
|
|
||||||
steps:
|
|
||||||
- name: Check out repository code
|
|
||||||
uses: ClickHouse/checkout@v1
|
|
||||||
with:
|
|
||||||
clear-repository: true
|
|
||||||
- name: Merge approved PR
|
|
||||||
run: |
|
|
||||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
|
||||||
python3 merge_pr.py --check-approved
|
|
2
.gitmodules
vendored
2
.gitmodules
vendored
@ -6,7 +6,7 @@
|
|||||||
url = https://github.com/facebook/zstd
|
url = https://github.com/facebook/zstd
|
||||||
[submodule "contrib/lz4"]
|
[submodule "contrib/lz4"]
|
||||||
path = contrib/lz4
|
path = contrib/lz4
|
||||||
url = https://github.com/ClickHouse/lz4
|
url = https://github.com/lz4/lz4
|
||||||
[submodule "contrib/librdkafka"]
|
[submodule "contrib/librdkafka"]
|
||||||
path = contrib/librdkafka
|
path = contrib/librdkafka
|
||||||
url = https://github.com/ClickHouse/librdkafka
|
url = https://github.com/ClickHouse/librdkafka
|
||||||
|
119
CMakeLists.txt
119
CMakeLists.txt
@ -135,23 +135,21 @@ endif ()
|
|||||||
include (cmake/check_flags.cmake)
|
include (cmake/check_flags.cmake)
|
||||||
include (cmake/add_warning.cmake)
|
include (cmake/add_warning.cmake)
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
# generate ranges for fast "addr2line" search
|
||||||
# generate ranges for fast "addr2line" search
|
if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
|
||||||
if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
|
# NOTE: that clang has a bug because of it does not emit .debug_aranges
|
||||||
# NOTE: that clang has a bug because of it does not emit .debug_aranges
|
# with ThinLTO, so custom ld.lld wrapper is shipped in docker images.
|
||||||
# with ThinLTO, so custom ld.lld wrapper is shipped in docker images.
|
set(COMPILER_FLAGS "${COMPILER_FLAGS} -gdwarf-aranges")
|
||||||
set(COMPILER_FLAGS "${COMPILER_FLAGS} -gdwarf-aranges")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
# See https://blog.llvm.org/posts/2021-04-05-constructor-homing-for-debug-info/
|
|
||||||
if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG" OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO")
|
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Xclang -fuse-ctor-homing")
|
|
||||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Xclang -fuse-ctor-homing")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
no_warning(enum-constexpr-conversion) # breaks Protobuf in clang-16
|
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
|
# See https://blog.llvm.org/posts/2021-04-05-constructor-homing-for-debug-info/
|
||||||
|
if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG" OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO")
|
||||||
|
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Xclang -fuse-ctor-homing")
|
||||||
|
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Xclang -fuse-ctor-homing")
|
||||||
|
endif()
|
||||||
|
|
||||||
|
no_warning(enum-constexpr-conversion) # breaks Protobuf in clang-16
|
||||||
|
|
||||||
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
|
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
|
||||||
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
|
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
|
||||||
option(ENABLE_BENCHMARKS "Build all benchmark programs in 'benchmarks' subdirectories" OFF)
|
option(ENABLE_BENCHMARKS "Build all benchmark programs in 'benchmarks' subdirectories" OFF)
|
||||||
@ -284,16 +282,12 @@ endif ()
|
|||||||
|
|
||||||
option (ENABLE_BUILD_PROFILING "Enable profiling of build time" OFF)
|
option (ENABLE_BUILD_PROFILING "Enable profiling of build time" OFF)
|
||||||
if (ENABLE_BUILD_PROFILING)
|
if (ENABLE_BUILD_PROFILING)
|
||||||
if (COMPILER_CLANG)
|
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ftime-trace")
|
||||||
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ftime-trace")
|
|
||||||
|
|
||||||
if (LINKER_NAME MATCHES "lld")
|
if (LINKER_NAME MATCHES "lld")
|
||||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--time-trace")
|
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--time-trace")
|
||||||
set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -Wl,--time-trace")
|
set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -Wl,--time-trace")
|
||||||
endif ()
|
endif ()
|
||||||
else ()
|
|
||||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Build profiling is only available with CLang")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
set (CMAKE_CXX_STANDARD 23)
|
set (CMAKE_CXX_STANDARD 23)
|
||||||
@ -304,22 +298,20 @@ set (CMAKE_C_STANDARD 11)
|
|||||||
set (CMAKE_C_EXTENSIONS ON) # required by most contribs written in C
|
set (CMAKE_C_EXTENSIONS ON) # required by most contribs written in C
|
||||||
set (CMAKE_C_STANDARD_REQUIRED ON)
|
set (CMAKE_C_STANDARD_REQUIRED ON)
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
# Enable C++14 sized global deallocation functions. It should be enabled by setting -std=c++14 but I'm not sure.
|
||||||
# Enable C++14 sized global deallocation functions. It should be enabled by setting -std=c++14 but I'm not sure.
|
# See https://reviews.llvm.org/D112921
|
||||||
# See https://reviews.llvm.org/D112921
|
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsized-deallocation")
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsized-deallocation")
|
|
||||||
|
|
||||||
# falign-functions=32 prevents from random performance regressions with the code change. Thus, providing more stable
|
# falign-functions=32 prevents from random performance regressions with the code change. Thus, providing more stable
|
||||||
# benchmarks.
|
# benchmarks.
|
||||||
set(COMPILER_FLAGS "${COMPILER_FLAGS} -falign-functions=32")
|
set(COMPILER_FLAGS "${COMPILER_FLAGS} -falign-functions=32")
|
||||||
|
|
||||||
if (ARCH_AMD64)
|
if (ARCH_AMD64)
|
||||||
# align branches within a 32-Byte boundary to avoid the potential performance loss when code layout change,
|
# align branches within a 32-Byte boundary to avoid the potential performance loss when code layout change,
|
||||||
# which makes benchmark results more stable.
|
# which makes benchmark results more stable.
|
||||||
set(BRANCHES_WITHIN_32B_BOUNDARIES "-mbranches-within-32B-boundaries")
|
set(BRANCHES_WITHIN_32B_BOUNDARIES "-mbranches-within-32B-boundaries")
|
||||||
set(COMPILER_FLAGS "${COMPILER_FLAGS} ${BRANCHES_WITHIN_32B_BOUNDARIES}")
|
set(COMPILER_FLAGS "${COMPILER_FLAGS} ${BRANCHES_WITHIN_32B_BOUNDARIES}")
|
||||||
endif()
|
endif()
|
||||||
endif ()
|
|
||||||
|
|
||||||
# Disable floating-point expression contraction in order to get consistent floating point calculation results across platforms
|
# Disable floating-point expression contraction in order to get consistent floating point calculation results across platforms
|
||||||
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffp-contract=off")
|
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffp-contract=off")
|
||||||
@ -348,39 +340,34 @@ set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${COMPILER_FLAGS} $
|
|||||||
set (CMAKE_ASM_FLAGS_RELWITHDEBINFO "${CMAKE_ASM_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
set (CMAKE_ASM_FLAGS_RELWITHDEBINFO "${CMAKE_ASM_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||||
set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
if (OS_DARWIN)
|
||||||
if (OS_DARWIN)
|
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
|
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-U,_inside_main")
|
||||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-U,_inside_main")
|
endif()
|
||||||
endif()
|
|
||||||
|
|
||||||
# Display absolute paths in error messages. Otherwise KDevelop fails to navigate to correct file and opens a new file instead.
|
# Display absolute paths in error messages. Otherwise KDevelop fails to navigate to correct file and opens a new file instead.
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-absolute-paths")
|
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-absolute-paths")
|
||||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-absolute-paths")
|
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-absolute-paths")
|
||||||
|
|
||||||
if (NOT ENABLE_TESTS AND NOT SANITIZE AND NOT SANITIZE_COVERAGE AND OS_LINUX)
|
if (NOT ENABLE_TESTS AND NOT SANITIZE AND NOT SANITIZE_COVERAGE AND OS_LINUX)
|
||||||
# https://clang.llvm.org/docs/ThinLTO.html
|
# https://clang.llvm.org/docs/ThinLTO.html
|
||||||
# Applies to clang and linux only.
|
# Applies to clang and linux only.
|
||||||
# Disabled when building with tests or sanitizers.
|
# Disabled when building with tests or sanitizers.
|
||||||
option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
|
option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")
|
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")
|
||||||
|
|
||||||
# We cannot afford to use LTO when compiling unit tests, and it's not enough
|
|
||||||
# to only supply -fno-lto at the final linking stage. So we disable it
|
|
||||||
# completely.
|
|
||||||
if (ENABLE_THINLTO AND NOT ENABLE_TESTS AND NOT SANITIZE)
|
|
||||||
# Link time optimization
|
|
||||||
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
|
|
||||||
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
|
|
||||||
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
|
|
||||||
elseif (ENABLE_THINLTO)
|
|
||||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
|
# We cannot afford to use LTO when compiling unit tests, and it's not enough
|
||||||
|
# to only supply -fno-lto at the final linking stage. So we disable it
|
||||||
|
# completely.
|
||||||
|
if (ENABLE_THINLTO AND NOT ENABLE_TESTS AND NOT SANITIZE)
|
||||||
|
# Link time optimization
|
||||||
|
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
|
||||||
|
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
|
||||||
|
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
|
||||||
elseif (ENABLE_THINLTO)
|
elseif (ENABLE_THINLTO)
|
||||||
message (${RECONFIGURE_MESSAGE_LEVEL} "ThinLTO is only available with Clang")
|
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
# Turns on all external libs like s3, kafka, ODBC, ...
|
# Turns on all external libs like s3, kafka, ODBC, ...
|
||||||
|
@ -40,7 +40,6 @@ Every month we get together with the community (users, contributors, customers,
|
|||||||
|
|
||||||
Keep an eye out for upcoming meetups and events around the world. Somewhere else you want us to be? Please feel free to reach out to tyler `<at>` clickhouse `<dot>` com. You can also peruse [ClickHouse Events](https://clickhouse.com/company/news-events) for a list of all upcoming trainings, meetups, speaking engagements, etc.
|
Keep an eye out for upcoming meetups and events around the world. Somewhere else you want us to be? Please feel free to reach out to tyler `<at>` clickhouse `<dot>` com. You can also peruse [ClickHouse Events](https://clickhouse.com/company/news-events) for a list of all upcoming trainings, meetups, speaking engagements, etc.
|
||||||
|
|
||||||
* [ClickHouse Meetup in Bengaluru](https://www.meetup.com/clickhouse-bangalore-user-group/events/300405581/) - May 4
|
|
||||||
* [ClickHouse Happy Hour @ Tom's Watch Bar - Los Angeles](https://www.meetup.com/clickhouse-los-angeles-user-group/events/300740584/) - May 22
|
* [ClickHouse Happy Hour @ Tom's Watch Bar - Los Angeles](https://www.meetup.com/clickhouse-los-angeles-user-group/events/300740584/) - May 22
|
||||||
* [ClickHouse & Confluent Meetup in Dubai](https://www.meetup.com/clickhouse-dubai-meetup-group/events/299629189/) - May 28
|
* [ClickHouse & Confluent Meetup in Dubai](https://www.meetup.com/clickhouse-dubai-meetup-group/events/299629189/) - May 28
|
||||||
* [ClickHouse Meetup in Stockholm](https://www.meetup.com/clickhouse-stockholm-user-group/events/299752651/) - Jun 3
|
* [ClickHouse Meetup in Stockholm](https://www.meetup.com/clickhouse-stockholm-user-group/events/299752651/) - Jun 3
|
||||||
@ -49,6 +48,7 @@ Keep an eye out for upcoming meetups and events around the world. Somewhere else
|
|||||||
* [ClickHouse Meetup in Amsterdam](https://www.meetup.com/clickhouse-netherlands-user-group/events/300781068/) - Jun 27
|
* [ClickHouse Meetup in Amsterdam](https://www.meetup.com/clickhouse-netherlands-user-group/events/300781068/) - Jun 27
|
||||||
* [ClickHouse Meetup in Paris](https://www.meetup.com/clickhouse-france-user-group/events/300783448/) - Jul 9
|
* [ClickHouse Meetup in Paris](https://www.meetup.com/clickhouse-france-user-group/events/300783448/) - Jul 9
|
||||||
* [ClickHouse Meetup @ Ramp - New York City](https://www.meetup.com/clickhouse-new-york-user-group/events/300595845/) - Jul 9
|
* [ClickHouse Meetup @ Ramp - New York City](https://www.meetup.com/clickhouse-new-york-user-group/events/300595845/) - Jul 9
|
||||||
|
* [ClickHouse Meetup @ Klaviyo - Boston](https://www.meetup.com/clickhouse-boston-user-group/events/300907870) - Jul 11
|
||||||
|
|
||||||
## Recent Recordings
|
## Recent Recordings
|
||||||
* **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Current featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments"
|
* **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Current featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments"
|
||||||
|
@ -51,11 +51,9 @@ struct DecomposedFloat
|
|||||||
/// Returns 0 for both +0. and -0.
|
/// Returns 0 for both +0. and -0.
|
||||||
int sign() const
|
int sign() const
|
||||||
{
|
{
|
||||||
return (exponent() == 0 && mantissa() == 0)
|
if (exponent() == 0 && mantissa() == 0)
|
||||||
? 0
|
return 0;
|
||||||
: (isNegative()
|
return isNegative() ? -1 : 1;
|
||||||
? -1
|
|
||||||
: 1);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t exponent() const
|
uint16_t exponent() const
|
||||||
|
@ -11,7 +11,7 @@ namespace detail
|
|||||||
template <is_enum E, class F, size_t ...I>
|
template <is_enum E, class F, size_t ...I>
|
||||||
constexpr void static_for(F && f, std::index_sequence<I...>)
|
constexpr void static_for(F && f, std::index_sequence<I...>)
|
||||||
{
|
{
|
||||||
(std::forward<F>(f)(std::integral_constant<E, magic_enum::enum_value<E>(I)>()) , ...);
|
(f(std::integral_constant<E, magic_enum::enum_value<E>(I)>()) , ...);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -651,7 +651,9 @@ std::string_view JSON::getRawString() const
|
|||||||
Pos s = ptr_begin;
|
Pos s = ptr_begin;
|
||||||
if (*s != '"')
|
if (*s != '"')
|
||||||
throw JSONException(std::string("JSON: expected \", got ") + *s);
|
throw JSONException(std::string("JSON: expected \", got ") + *s);
|
||||||
while (++s != ptr_end && *s != '"');
|
++s;
|
||||||
|
while (s != ptr_end && *s != '"')
|
||||||
|
++s;
|
||||||
if (s != ptr_end)
|
if (s != ptr_end)
|
||||||
return std::string_view(ptr_begin + 1, s - ptr_begin - 1);
|
return std::string_view(ptr_begin + 1, s - ptr_begin - 1);
|
||||||
throw JSONException("JSON: incorrect syntax (expected end of string, found end of JSON).");
|
throw JSONException("JSON: incorrect syntax (expected end of string, found end of JSON).");
|
||||||
|
@ -74,7 +74,7 @@ public:
|
|||||||
const char * data() const { return ptr_begin; }
|
const char * data() const { return ptr_begin; }
|
||||||
const char * dataEnd() const { return ptr_end; }
|
const char * dataEnd() const { return ptr_end; }
|
||||||
|
|
||||||
enum ElementType
|
enum ElementType : uint8_t
|
||||||
{
|
{
|
||||||
TYPE_OBJECT,
|
TYPE_OBJECT,
|
||||||
TYPE_ARRAY,
|
TYPE_ARRAY,
|
||||||
|
@ -27,7 +27,7 @@ namespace TypeListUtils /// In some contexts it's more handy to use functions in
|
|||||||
constexpr Root<Args...> changeRoot(TypeList<Args...>) { return {}; }
|
constexpr Root<Args...> changeRoot(TypeList<Args...>) { return {}; }
|
||||||
|
|
||||||
template <typename F, typename ...Args>
|
template <typename F, typename ...Args>
|
||||||
constexpr void forEach(TypeList<Args...>, F && f) { (std::forward<F>(f)(TypeList<Args>{}), ...); }
|
constexpr void forEach(TypeList<Args...>, F && f) { (f(TypeList<Args>{}), ...); }
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename TypeListLeft, typename TypeListRight>
|
template <typename TypeListLeft, typename TypeListRight>
|
||||||
|
@ -21,7 +21,7 @@ bool func_wrapper(Func && func, Arg && arg)
|
|||||||
template <typename T, T Begin, typename Func, T... Is>
|
template <typename T, T Begin, typename Func, T... Is>
|
||||||
constexpr bool static_for_impl(Func && f, std::integer_sequence<T, Is...>)
|
constexpr bool static_for_impl(Func && f, std::integer_sequence<T, Is...>)
|
||||||
{
|
{
|
||||||
return (func_wrapper(std::forward<Func>(f), std::integral_constant<T, Begin + Is>{}) || ...);
|
return (func_wrapper(f, std::integral_constant<T, Begin + Is>{}) || ...);
|
||||||
}
|
}
|
||||||
|
|
||||||
template <auto Begin, decltype(Begin) End, typename Func>
|
template <auto Begin, decltype(Begin) End, typename Func>
|
||||||
|
@ -147,7 +147,7 @@ constexpr uint16_t maybe_negate(uint16_t x)
|
|||||||
return ~x;
|
return ~x;
|
||||||
}
|
}
|
||||||
|
|
||||||
enum class ReturnMode
|
enum class ReturnMode : uint8_t
|
||||||
{
|
{
|
||||||
End,
|
End,
|
||||||
Nullptr,
|
Nullptr,
|
||||||
|
@ -77,8 +77,7 @@ uint64_t getMemoryAmountOrZero()
|
|||||||
{
|
{
|
||||||
uint64_t limit_v1;
|
uint64_t limit_v1;
|
||||||
if (limit_file_v1 >> limit_v1)
|
if (limit_file_v1 >> limit_v1)
|
||||||
if (limit_v1 < memory_amount)
|
memory_amount = std::min(memory_amount, limit_v1);
|
||||||
memory_amount = limit_v1;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -146,7 +146,7 @@ namespace impl
|
|||||||
TUInt res;
|
TUInt res;
|
||||||
if constexpr (sizeof(TUInt) == 1)
|
if constexpr (sizeof(TUInt) == 1)
|
||||||
{
|
{
|
||||||
res = static_cast<UInt8>(unhexDigit(data[0])) * 0x10 + static_cast<UInt8>(unhexDigit(data[1]));
|
res = unhexDigit(data[0]) * 0x10 + unhexDigit(data[1]);
|
||||||
}
|
}
|
||||||
else if constexpr (sizeof(TUInt) == 2)
|
else if constexpr (sizeof(TUInt) == 2)
|
||||||
{
|
{
|
||||||
@ -176,17 +176,19 @@ namespace impl
|
|||||||
};
|
};
|
||||||
|
|
||||||
/// Helper template class to convert a value of any supported type to hexadecimal representation and back.
|
/// Helper template class to convert a value of any supported type to hexadecimal representation and back.
|
||||||
template <typename T, typename SFINAE = void>
|
template <typename T>
|
||||||
struct HexConversion;
|
struct HexConversion;
|
||||||
|
|
||||||
template <typename TUInt>
|
template <typename TUInt>
|
||||||
struct HexConversion<TUInt, std::enable_if_t<std::is_integral_v<TUInt>>> : public HexConversionUInt<TUInt> {};
|
requires(std::is_integral_v<TUInt>)
|
||||||
|
struct HexConversion<TUInt> : public HexConversionUInt<TUInt> {};
|
||||||
|
|
||||||
template <size_t Bits, typename Signed>
|
template <size_t Bits, typename Signed>
|
||||||
struct HexConversion<wide::integer<Bits, Signed>> : public HexConversionUInt<wide::integer<Bits, Signed>> {};
|
struct HexConversion<wide::integer<Bits, Signed>> : public HexConversionUInt<wide::integer<Bits, Signed>> {};
|
||||||
|
|
||||||
template <typename CityHashUInt128> /// Partial specialization here allows not to include <city.h> in this header.
|
template <typename CityHashUInt128> /// Partial specialization here allows not to include <city.h> in this header.
|
||||||
struct HexConversion<CityHashUInt128, std::enable_if_t<std::is_same_v<CityHashUInt128, typename CityHash_v1_0_2::uint128>>>
|
requires(std::is_same_v<CityHashUInt128, typename CityHash_v1_0_2::uint128>)
|
||||||
|
struct HexConversion<CityHashUInt128>
|
||||||
{
|
{
|
||||||
static const constexpr size_t num_hex_digits = 32;
|
static const constexpr size_t num_hex_digits = 32;
|
||||||
|
|
||||||
|
@ -20,24 +20,26 @@ Out & dumpValue(Out &, T &&);
|
|||||||
|
|
||||||
/// Catch-all case.
|
/// Catch-all case.
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == -1, Out> & dumpImpl(Out & out, T &&) // NOLINT(cppcoreguidelines-missing-std-forward)
|
requires(priority == -1)
|
||||||
|
Out & dumpImpl(Out & out, T &&) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << "{...}";
|
return out << "{...}";
|
||||||
}
|
}
|
||||||
|
|
||||||
/// An object, that could be output with operator <<.
|
/// An object, that could be output with operator <<.
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 0, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::declval<Out &>() << std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
requires(priority == 0)
|
||||||
|
Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::declval<Out &>() << std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << x;
|
return out << x;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A pointer-like object.
|
/// A pointer-like object.
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 1
|
requires(priority == 1
|
||||||
/// Protect from the case when operator * do effectively nothing (function pointer).
|
/// Protect from the case when operator * do effectively nothing (function pointer).
|
||||||
&& !std::is_same_v<std::decay_t<T>, std::decay_t<decltype(*std::declval<T>())>>
|
&& !std::is_same_v<std::decay_t<T>, std::decay_t<decltype(*std::declval<T>())>>)
|
||||||
, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(*std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(*std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
if (!x)
|
if (!x)
|
||||||
return out << "nullptr";
|
return out << "nullptr";
|
||||||
@ -46,7 +48,8 @@ std::enable_if_t<priority == 1
|
|||||||
|
|
||||||
/// Container.
|
/// Container.
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::begin(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
requires(priority == 2)
|
||||||
|
Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::begin(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
bool first = true;
|
bool first = true;
|
||||||
out << "{";
|
out << "{";
|
||||||
@ -63,8 +66,8 @@ std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<
|
|||||||
|
|
||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 3 && std::is_enum_v<std::decay_t<T>>, Out> &
|
requires(priority == 3 && std::is_enum_v<std::decay_t<T>>)
|
||||||
dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
Out & dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << magic_enum::enum_name(x);
|
return out << magic_enum::enum_name(x);
|
||||||
}
|
}
|
||||||
@ -72,8 +75,8 @@ dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
|||||||
/// string and const char * - output not as container or pointer.
|
/// string and const char * - output not as container or pointer.
|
||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 3 && (std::is_same_v<std::decay_t<T>, std::string> || std::is_same_v<std::decay_t<T>, const char *>), Out> &
|
requires(priority == 3 && (std::is_same_v<std::decay_t<T>, std::string> || std::is_same_v<std::decay_t<T>, const char *>))
|
||||||
dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
Out & dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << std::quoted(x);
|
return out << std::quoted(x);
|
||||||
}
|
}
|
||||||
@ -81,8 +84,8 @@ dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
|||||||
/// UInt8 - output as number, not char.
|
/// UInt8 - output as number, not char.
|
||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 3 && std::is_same_v<std::decay_t<T>, unsigned char>, Out> &
|
requires(priority == 3 && std::is_same_v<std::decay_t<T>, unsigned char>)
|
||||||
dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
Out & dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return out << int(x);
|
return out << int(x);
|
||||||
}
|
}
|
||||||
@ -108,7 +111,8 @@ Out & dumpTupleImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-f
|
|||||||
}
|
}
|
||||||
|
|
||||||
template <int priority, typename Out, typename T>
|
template <int priority, typename Out, typename T>
|
||||||
std::enable_if_t<priority == 4, Out> & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::get<0>(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
requires(priority == 4)
|
||||||
|
Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::get<0>(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
|
||||||
{
|
{
|
||||||
return dumpTupleImpl<0>(out, x);
|
return dumpTupleImpl<0>(out, x);
|
||||||
}
|
}
|
||||||
|
@ -250,14 +250,16 @@ ALWAYS_INLINE inline char * uitoa<UnsignedOfSize<1>, 1>(char * p, UnsignedOfSize
|
|||||||
//===----------------------------------------------------------===//
|
//===----------------------------------------------------------===//
|
||||||
|
|
||||||
// itoa: handle unsigned integral operands (selected by SFINAE)
|
// itoa: handle unsigned integral operands (selected by SFINAE)
|
||||||
template <typename U, std::enable_if_t<!std::is_signed_v<U> && std::is_integral_v<U>> * = nullptr>
|
template <typename U>
|
||||||
|
requires(!std::is_signed_v<U> && std::is_integral_v<U>)
|
||||||
ALWAYS_INLINE inline char * itoa(U u, char * p)
|
ALWAYS_INLINE inline char * itoa(U u, char * p)
|
||||||
{
|
{
|
||||||
return convert::uitoa(p, u);
|
return convert::uitoa(p, u);
|
||||||
}
|
}
|
||||||
|
|
||||||
// itoa: handle signed integral operands (selected by SFINAE)
|
// itoa: handle signed integral operands (selected by SFINAE)
|
||||||
template <typename I, size_t N = sizeof(I), std::enable_if_t<std::is_signed_v<I> && std::is_integral_v<I>> * = nullptr>
|
template <typename I, size_t N = sizeof(I)>
|
||||||
|
requires(std::is_signed_v<I> && std::is_integral_v<I>)
|
||||||
ALWAYS_INLINE inline char * itoa(I i, char * p)
|
ALWAYS_INLINE inline char * itoa(I i, char * p)
|
||||||
{
|
{
|
||||||
// Need "mask" to be filled with a copy of the sign bit.
|
// Need "mask" to be filled with a copy of the sign bit.
|
||||||
|
@ -19,8 +19,8 @@ auto map(const Collection<Params...> & collection, Mapper && mapper)
|
|||||||
using value_type = unqualified_t<decltype(mapper(*std::begin(collection)))>;
|
using value_type = unqualified_t<decltype(mapper(*std::begin(collection)))>;
|
||||||
|
|
||||||
return Collection<value_type>(
|
return Collection<value_type>(
|
||||||
boost::make_transform_iterator(std::begin(collection), std::forward<Mapper>(mapper)),
|
boost::make_transform_iterator(std::begin(collection), mapper),
|
||||||
boost::make_transform_iterator(std::end(collection), std::forward<Mapper>(mapper)));
|
boost::make_transform_iterator(std::end(collection), mapper));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** \brief Returns collection of specified container-type,
|
/** \brief Returns collection of specified container-type,
|
||||||
@ -33,8 +33,8 @@ auto map(const Collection & collection, Mapper && mapper)
|
|||||||
using value_type = unqualified_t<decltype(mapper(*std::begin(collection)))>;
|
using value_type = unqualified_t<decltype(mapper(*std::begin(collection)))>;
|
||||||
|
|
||||||
return ResultCollection<value_type>(
|
return ResultCollection<value_type>(
|
||||||
boost::make_transform_iterator(std::begin(collection), std::forward<Mapper>(mapper)),
|
boost::make_transform_iterator(std::begin(collection), mapper),
|
||||||
boost::make_transform_iterator(std::end(collection), std::forward<Mapper>(mapper)));
|
boost::make_transform_iterator(std::end(collection), mapper));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** \brief Returns collection of specified type,
|
/** \brief Returns collection of specified type,
|
||||||
@ -45,8 +45,8 @@ template <typename ResultCollection, typename Collection, typename Mapper>
|
|||||||
auto map(const Collection & collection, Mapper && mapper)
|
auto map(const Collection & collection, Mapper && mapper)
|
||||||
{
|
{
|
||||||
return ResultCollection(
|
return ResultCollection(
|
||||||
boost::make_transform_iterator(std::begin(collection), std::forward<Mapper>(mapper)),
|
boost::make_transform_iterator(std::begin(collection), mapper),
|
||||||
boost::make_transform_iterator(std::end(collection), std::forward<Mapper>(mapper)));
|
boost::make_transform_iterator(std::end(collection), mapper));
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -23,12 +23,10 @@ namespace internal
|
|||||||
|
|
||||||
/// For loop adaptor which is used to iterate through a half-closed interval [begin, end).
|
/// For loop adaptor which is used to iterate through a half-closed interval [begin, end).
|
||||||
/// The parameters `begin` and `end` can have any integral or enum types.
|
/// The parameters `begin` and `end` can have any integral or enum types.
|
||||||
template <typename BeginType,
|
template <typename BeginType, typename EndType>
|
||||||
typename EndType,
|
requires((std::is_integral_v<BeginType> || std::is_enum_v<BeginType>) &&
|
||||||
typename = std::enable_if_t<
|
(std::is_integral_v<EndType> || std::is_enum_v<EndType>) &&
|
||||||
(std::is_integral_v<BeginType> || std::is_enum_v<BeginType>) &&
|
(!std::is_enum_v<BeginType> || !std::is_enum_v<EndType> || std::is_same_v<BeginType, EndType>))
|
||||||
(std::is_integral_v<EndType> || std::is_enum_v<EndType>) &&
|
|
||||||
(!std::is_enum_v<BeginType> || !std::is_enum_v<EndType> || std::is_same_v<BeginType, EndType>), void>>
|
|
||||||
inline auto range(BeginType begin, EndType end)
|
inline auto range(BeginType begin, EndType end)
|
||||||
{
|
{
|
||||||
if constexpr (std::is_integral_v<BeginType> && std::is_integral_v<EndType>)
|
if constexpr (std::is_integral_v<BeginType> && std::is_integral_v<EndType>)
|
||||||
@ -50,8 +48,8 @@ inline auto range(BeginType begin, EndType end)
|
|||||||
/// For loop adaptor which is used to iterate through a half-closed interval [0, end).
|
/// For loop adaptor which is used to iterate through a half-closed interval [0, end).
|
||||||
/// The parameter `end` can have any integral or enum type.
|
/// The parameter `end` can have any integral or enum type.
|
||||||
/// The same as range(0, end).
|
/// The same as range(0, end).
|
||||||
template <typename Type,
|
template <typename Type>
|
||||||
typename = std::enable_if_t<std::is_integral_v<Type> || std::is_enum_v<Type>, void>>
|
requires(std::is_integral_v<Type> || std::is_enum_v<Type>)
|
||||||
inline auto range(Type end)
|
inline auto range(Type end)
|
||||||
{
|
{
|
||||||
if constexpr (std::is_integral_v<Type>)
|
if constexpr (std::is_integral_v<Type>)
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
#include <ctime>
|
#include <ctime>
|
||||||
#include <cerrno>
|
#include <cerrno>
|
||||||
|
#include <system_error>
|
||||||
|
|
||||||
#if defined(OS_DARWIN)
|
#if defined(OS_DARWIN)
|
||||||
#include <mach/mach.h>
|
#include <mach/mach.h>
|
||||||
@ -34,7 +35,8 @@ void sleepForNanoseconds(uint64_t nanoseconds)
|
|||||||
constexpr auto clock_type = CLOCK_MONOTONIC;
|
constexpr auto clock_type = CLOCK_MONOTONIC;
|
||||||
|
|
||||||
struct timespec current_time;
|
struct timespec current_time;
|
||||||
clock_gettime(clock_type, ¤t_time);
|
if (0 != clock_gettime(clock_type, ¤t_time))
|
||||||
|
throw std::system_error(std::error_code(errno, std::system_category()));
|
||||||
|
|
||||||
constexpr uint64_t resolution = 1'000'000'000;
|
constexpr uint64_t resolution = 1'000'000'000;
|
||||||
struct timespec finish_time = current_time;
|
struct timespec finish_time = current_time;
|
||||||
|
@ -111,7 +111,8 @@ public:
|
|||||||
|
|
||||||
constexpr explicit operator bool() const noexcept;
|
constexpr explicit operator bool() const noexcept;
|
||||||
|
|
||||||
template <typename T, typename = std::enable_if_t<std::is_arithmetic_v<T>, T>>
|
template <typename T>
|
||||||
|
requires(std::is_arithmetic_v<T>)
|
||||||
constexpr operator T() const noexcept;
|
constexpr operator T() const noexcept;
|
||||||
|
|
||||||
constexpr operator long double() const noexcept;
|
constexpr operator long double() const noexcept;
|
||||||
@ -208,12 +209,14 @@ constexpr integer<Bits, Signed> operator<<(const integer<Bits, Signed> & lhs, in
|
|||||||
template <size_t Bits, typename Signed>
|
template <size_t Bits, typename Signed>
|
||||||
constexpr integer<Bits, Signed> operator>>(const integer<Bits, Signed> & lhs, int n) noexcept;
|
constexpr integer<Bits, Signed> operator>>(const integer<Bits, Signed> & lhs, int n) noexcept;
|
||||||
|
|
||||||
template <size_t Bits, typename Signed, typename Int, typename = std::enable_if_t<!std::is_same_v<Int, int>>>
|
template <size_t Bits, typename Signed, typename Int>
|
||||||
|
requires(!std::is_same_v<Int, int>)
|
||||||
constexpr integer<Bits, Signed> operator<<(const integer<Bits, Signed> & lhs, Int n) noexcept
|
constexpr integer<Bits, Signed> operator<<(const integer<Bits, Signed> & lhs, Int n) noexcept
|
||||||
{
|
{
|
||||||
return lhs << int(n);
|
return lhs << int(n);
|
||||||
}
|
}
|
||||||
template <size_t Bits, typename Signed, typename Int, typename = std::enable_if_t<!std::is_same_v<Int, int>>>
|
template <size_t Bits, typename Signed, typename Int>
|
||||||
|
requires(!std::is_same_v<Int, int>)
|
||||||
constexpr integer<Bits, Signed> operator>>(const integer<Bits, Signed> & lhs, Int n) noexcept
|
constexpr integer<Bits, Signed> operator>>(const integer<Bits, Signed> & lhs, Int n) noexcept
|
||||||
{
|
{
|
||||||
return lhs >> int(n);
|
return lhs >> int(n);
|
||||||
@ -262,4 +265,3 @@ struct hash<wide::integer<Bits, Signed>>;
|
|||||||
// NOLINTEND(*)
|
// NOLINTEND(*)
|
||||||
|
|
||||||
#include "wide_integer_impl.h"
|
#include "wide_integer_impl.h"
|
||||||
|
|
||||||
|
@ -1246,7 +1246,8 @@ constexpr integer<Bits, Signed>::operator bool() const noexcept
|
|||||||
}
|
}
|
||||||
|
|
||||||
template <size_t Bits, typename Signed>
|
template <size_t Bits, typename Signed>
|
||||||
template <class T, class>
|
template <class T>
|
||||||
|
requires(std::is_arithmetic_v<T>)
|
||||||
constexpr integer<Bits, Signed>::operator T() const noexcept
|
constexpr integer<Bits, Signed>::operator T() const noexcept
|
||||||
{
|
{
|
||||||
static_assert(std::numeric_limits<T>::is_integer);
|
static_assert(std::numeric_limits<T>::is_integer);
|
||||||
|
@ -5,14 +5,14 @@ if (ENABLE_CLANG_TIDY)
|
|||||||
|
|
||||||
find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
|
find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
|
||||||
if (CLANG_TIDY_CACHE_PATH)
|
if (CLANG_TIDY_CACHE_PATH)
|
||||||
find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-17" "clang-tidy-16" "clang-tidy")
|
find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-18" "clang-tidy-17" "clang-tidy-16" "clang-tidy")
|
||||||
|
|
||||||
# Why do we use ';' here?
|
# Why do we use ';' here?
|
||||||
# It's a cmake black magic: https://cmake.org/cmake/help/latest/prop_tgt/LANG_CLANG_TIDY.html#prop_tgt:%3CLANG%3E_CLANG_TIDY
|
# It's a cmake black magic: https://cmake.org/cmake/help/latest/prop_tgt/LANG_CLANG_TIDY.html#prop_tgt:%3CLANG%3E_CLANG_TIDY
|
||||||
# The CLANG_TIDY_PATH is passed to CMAKE_CXX_CLANG_TIDY, which follows CXX_CLANG_TIDY syntax.
|
# The CLANG_TIDY_PATH is passed to CMAKE_CXX_CLANG_TIDY, which follows CXX_CLANG_TIDY syntax.
|
||||||
set (CLANG_TIDY_PATH "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_PATH}" CACHE STRING "A combined command to run clang-tidy with caching wrapper")
|
set (CLANG_TIDY_PATH "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_PATH}" CACHE STRING "A combined command to run clang-tidy with caching wrapper")
|
||||||
else ()
|
else ()
|
||||||
find_program (CLANG_TIDY_PATH NAMES "clang-tidy-17" "clang-tidy-16" "clang-tidy")
|
find_program (CLANG_TIDY_PATH NAMES "clang-tidy-18" "clang-tidy-17" "clang-tidy-16" "clang-tidy")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (CLANG_TIDY_PATH)
|
if (CLANG_TIDY_PATH)
|
||||||
|
@ -5,17 +5,15 @@ set (DEFAULT_LIBS "-nodefaultlibs")
|
|||||||
|
|
||||||
# We need builtins from Clang's RT even without libcxx - for ubsan+int128.
|
# We need builtins from Clang's RT even without libcxx - for ubsan+int128.
|
||||||
# See https://bugs.llvm.org/show_bug.cgi?id=16404
|
# See https://bugs.llvm.org/show_bug.cgi?id=16404
|
||||||
if (COMPILER_CLANG)
|
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --target=${CMAKE_CXX_COMPILER_TARGET} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||||
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --target=${CMAKE_CXX_COMPILER_TARGET} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
|
|
||||||
|
|
||||||
# Apparently, in clang-19, the UBSan support library for C++ was moved out into ubsan_standalone_cxx.a, so we have to include both.
|
# Apparently, in clang-19, the UBSan support library for C++ was moved out into ubsan_standalone_cxx.a, so we have to include both.
|
||||||
if (SANITIZE STREQUAL undefined)
|
if (SANITIZE STREQUAL undefined)
|
||||||
string(REPLACE "builtins.a" "ubsan_standalone_cxx.a" EXTRA_BUILTINS_LIBRARY "${BUILTINS_LIBRARY}")
|
string(REPLACE "builtins.a" "ubsan_standalone_cxx.a" EXTRA_BUILTINS_LIBRARY "${BUILTINS_LIBRARY}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (NOT EXISTS "${BUILTINS_LIBRARY}")
|
if (NOT EXISTS "${BUILTINS_LIBRARY}")
|
||||||
set (BUILTINS_LIBRARY "-lgcc")
|
set (BUILTINS_LIBRARY "-lgcc")
|
||||||
endif ()
|
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (OS_ANDROID)
|
if (OS_ANDROID)
|
||||||
|
@ -26,9 +26,7 @@ if (SANITIZE)
|
|||||||
|
|
||||||
elseif (SANITIZE STREQUAL "thread")
|
elseif (SANITIZE STREQUAL "thread")
|
||||||
set (TSAN_FLAGS "-fsanitize=thread")
|
set (TSAN_FLAGS "-fsanitize=thread")
|
||||||
if (COMPILER_CLANG)
|
set (TSAN_FLAGS "${TSAN_FLAGS} -fsanitize-ignorelist=${PROJECT_SOURCE_DIR}/tests/tsan_ignorelist.txt")
|
||||||
set (TSAN_FLAGS "${TSAN_FLAGS} -fsanitize-ignorelist=${PROJECT_SOURCE_DIR}/tests/tsan_ignorelist.txt")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} ${TSAN_FLAGS}")
|
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} ${TSAN_FLAGS}")
|
||||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} ${TSAN_FLAGS}")
|
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} ${TSAN_FLAGS}")
|
||||||
@ -44,9 +42,7 @@ if (SANITIZE)
|
|||||||
# that's why we often receive reports about UIO. The simplest way to avoid this is just set this flag here.
|
# that's why we often receive reports about UIO. The simplest way to avoid this is just set this flag here.
|
||||||
set(UBSAN_FLAGS "${UBSAN_FLAGS} -fno-sanitize=unsigned-integer-overflow")
|
set(UBSAN_FLAGS "${UBSAN_FLAGS} -fno-sanitize=unsigned-integer-overflow")
|
||||||
endif()
|
endif()
|
||||||
if (COMPILER_CLANG)
|
set (UBSAN_FLAGS "${UBSAN_FLAGS} -fsanitize-ignorelist=${PROJECT_SOURCE_DIR}/tests/ubsan_ignorelist.txt")
|
||||||
set (UBSAN_FLAGS "${UBSAN_FLAGS} -fsanitize-ignorelist=${PROJECT_SOURCE_DIR}/tests/ubsan_ignorelist.txt")
|
|
||||||
endif()
|
|
||||||
|
|
||||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} ${UBSAN_FLAGS}")
|
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} ${UBSAN_FLAGS}")
|
||||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} ${UBSAN_FLAGS}")
|
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} ${UBSAN_FLAGS}")
|
||||||
|
@ -1,10 +1,6 @@
|
|||||||
# Compiler
|
# Compiler
|
||||||
|
|
||||||
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
if (NOT CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||||
set (COMPILER_CLANG 1) # Safe to treat AppleClang as a regular Clang, in general.
|
|
||||||
elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
|
||||||
set (COMPILER_CLANG 1)
|
|
||||||
else ()
|
|
||||||
message (FATAL_ERROR "Compiler ${CMAKE_CXX_COMPILER_ID} is not supported")
|
message (FATAL_ERROR "Compiler ${CMAKE_CXX_COMPILER_ID} is not supported")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
@ -13,34 +9,30 @@ execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE COMPILER
|
|||||||
message (STATUS "Using compiler:\n${COMPILER_SELF_IDENTIFICATION}")
|
message (STATUS "Using compiler:\n${COMPILER_SELF_IDENTIFICATION}")
|
||||||
|
|
||||||
# Require minimum compiler versions
|
# Require minimum compiler versions
|
||||||
set (CLANG_MINIMUM_VERSION 16)
|
set (CLANG_MINIMUM_VERSION 17)
|
||||||
set (XCODE_MINIMUM_VERSION 12.0)
|
set (XCODE_MINIMUM_VERSION 12.0)
|
||||||
set (APPLE_CLANG_MINIMUM_VERSION 12.0.0)
|
set (APPLE_CLANG_MINIMUM_VERSION 12.0.0)
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
||||||
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
|
||||||
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
|
if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG})
|
||||||
if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG})
|
message (FATAL_ERROR "Compilation with AppleClang is unsupported. Please use vanilla Clang, e.g. from Homebrew.")
|
||||||
message (FATAL_ERROR "Compilation with AppleClang is unsupported. Please use vanilla Clang, e.g. from Homebrew.")
|
endif ()
|
||||||
endif ()
|
|
||||||
|
|
||||||
# For a mapping between XCode / AppleClang / vanilla Clang versions, see https://en.wikipedia.org/wiki/Xcode
|
# For a mapping between XCode / AppleClang / vanilla Clang versions, see https://en.wikipedia.org/wiki/Xcode
|
||||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${APPLE_CLANG_MINIMUM_VERSION})
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${APPLE_CLANG_MINIMUM_VERSION})
|
||||||
message (FATAL_ERROR "Compilation with AppleClang version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${APPLE_CLANG_MINIMUM_VERSION} (Xcode ${XCODE_MINIMUM_VERSION}).")
|
message (FATAL_ERROR "Compilation with AppleClang version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${APPLE_CLANG_MINIMUM_VERSION} (Xcode ${XCODE_MINIMUM_VERSION}).")
|
||||||
endif ()
|
endif ()
|
||||||
else ()
|
else ()
|
||||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${CLANG_MINIMUM_VERSION})
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${CLANG_MINIMUM_VERSION})
|
||||||
message (FATAL_ERROR "Compilation with Clang version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${CLANG_MINIMUM_VERSION}.")
|
message (FATAL_ERROR "Compilation with Clang version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${CLANG_MINIMUM_VERSION}.")
|
||||||
endif ()
|
|
||||||
endif ()
|
endif ()
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
# Linker
|
|
||||||
|
|
||||||
string (REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
|
string (REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
|
||||||
list (GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)
|
list (GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)
|
||||||
|
|
||||||
# Example values: `lld-10`
|
# Linker
|
||||||
option (LINKER_NAME "Linker name or full path")
|
option (LINKER_NAME "Linker name or full path")
|
||||||
|
|
||||||
if (LINKER_NAME MATCHES "gold")
|
if (LINKER_NAME MATCHES "gold")
|
||||||
@ -48,19 +40,15 @@ if (LINKER_NAME MATCHES "gold")
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (NOT LINKER_NAME)
|
if (NOT LINKER_NAME)
|
||||||
if (COMPILER_CLANG)
|
if (OS_LINUX AND NOT ARCH_S390X)
|
||||||
if (OS_LINUX AND NOT ARCH_S390X)
|
find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "ld.lld")
|
||||||
find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "ld.lld")
|
elseif (OS_DARWIN)
|
||||||
elseif (OS_DARWIN)
|
find_program (LLD_PATH NAMES "ld")
|
||||||
find_program (LLD_PATH NAMES "ld")
|
|
||||||
endif ()
|
|
||||||
endif ()
|
endif ()
|
||||||
if (LLD_PATH)
|
if (LLD_PATH)
|
||||||
if (OS_LINUX OR OS_DARWIN)
|
if (OS_LINUX OR OS_DARWIN)
|
||||||
if (COMPILER_CLANG)
|
# Clang driver simply allows full linker path.
|
||||||
# Clang driver simply allows full linker path.
|
set (LINKER_NAME ${LLD_PATH})
|
||||||
set (LINKER_NAME ${LLD_PATH})
|
|
||||||
endif ()
|
|
||||||
endif ()
|
endif ()
|
||||||
endif()
|
endif()
|
||||||
endif()
|
endif()
|
||||||
@ -82,47 +70,28 @@ else ()
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
# Archiver
|
# Archiver
|
||||||
|
find_program (LLVM_AR_PATH NAMES "llvm-ar-${COMPILER_VERSION_MAJOR}" "llvm-ar")
|
||||||
if (COMPILER_CLANG)
|
|
||||||
find_program (LLVM_AR_PATH NAMES "llvm-ar-${COMPILER_VERSION_MAJOR}" "llvm-ar")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (LLVM_AR_PATH)
|
if (LLVM_AR_PATH)
|
||||||
set (CMAKE_AR "${LLVM_AR_PATH}")
|
set (CMAKE_AR "${LLVM_AR_PATH}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
message(STATUS "Using archiver: ${CMAKE_AR}")
|
message(STATUS "Using archiver: ${CMAKE_AR}")
|
||||||
|
|
||||||
# Ranlib
|
# Ranlib
|
||||||
|
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib-${COMPILER_VERSION_MAJOR}" "llvm-ranlib")
|
||||||
if (COMPILER_CLANG)
|
|
||||||
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib-${COMPILER_VERSION_MAJOR}" "llvm-ranlib")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (LLVM_RANLIB_PATH)
|
if (LLVM_RANLIB_PATH)
|
||||||
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}")
|
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
message(STATUS "Using ranlib: ${CMAKE_RANLIB}")
|
message(STATUS "Using ranlib: ${CMAKE_RANLIB}")
|
||||||
|
|
||||||
# Install Name Tool
|
# Install Name Tool
|
||||||
|
find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool-${COMPILER_VERSION_MAJOR}" "llvm-install-name-tool")
|
||||||
if (COMPILER_CLANG)
|
|
||||||
find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool-${COMPILER_VERSION_MAJOR}" "llvm-install-name-tool")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (LLVM_INSTALL_NAME_TOOL_PATH)
|
if (LLVM_INSTALL_NAME_TOOL_PATH)
|
||||||
set (CMAKE_INSTALL_NAME_TOOL "${LLVM_INSTALL_NAME_TOOL_PATH}")
|
set (CMAKE_INSTALL_NAME_TOOL "${LLVM_INSTALL_NAME_TOOL_PATH}")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
message(STATUS "Using install-name-tool: ${CMAKE_INSTALL_NAME_TOOL}")
|
message(STATUS "Using install-name-tool: ${CMAKE_INSTALL_NAME_TOOL}")
|
||||||
|
|
||||||
# Objcopy
|
# Objcopy
|
||||||
|
find_program (OBJCOPY_PATH NAMES "llvm-objcopy-${COMPILER_VERSION_MAJOR}" "llvm-objcopy" "objcopy")
|
||||||
if (COMPILER_CLANG)
|
|
||||||
find_program (OBJCOPY_PATH NAMES "llvm-objcopy-${COMPILER_VERSION_MAJOR}" "llvm-objcopy" "objcopy")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (OBJCOPY_PATH)
|
if (OBJCOPY_PATH)
|
||||||
message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
|
message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
|
||||||
else ()
|
else ()
|
||||||
@ -130,11 +99,7 @@ else ()
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
# Strip
|
# Strip
|
||||||
|
find_program (STRIP_PATH NAMES "llvm-strip-${COMPILER_VERSION_MAJOR}" "llvm-strip" "strip")
|
||||||
if (COMPILER_CLANG)
|
|
||||||
find_program (STRIP_PATH NAMES "llvm-strip-${COMPILER_VERSION_MAJOR}" "llvm-strip" "strip")
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
if (STRIP_PATH)
|
if (STRIP_PATH)
|
||||||
message (STATUS "Using strip: ${STRIP_PATH}")
|
message (STATUS "Using strip: ${STRIP_PATH}")
|
||||||
else ()
|
else ()
|
||||||
|
@ -15,37 +15,35 @@ if ((NOT CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG") AND (NOT SANITIZE) AND (NOT CMAKE
|
|||||||
add_warning(frame-larger-than=65536)
|
add_warning(frame-larger-than=65536)
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
# Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
|
||||||
# Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
|
# We want to get everything out of the compiler for code quality.
|
||||||
# We want to get everything out of the compiler for code quality.
|
add_warning(everything)
|
||||||
add_warning(everything)
|
add_warning(pedantic)
|
||||||
add_warning(pedantic)
|
no_warning(zero-length-array)
|
||||||
no_warning(zero-length-array)
|
no_warning(c++98-compat-pedantic)
|
||||||
no_warning(c++98-compat-pedantic)
|
no_warning(c++98-compat)
|
||||||
no_warning(c++98-compat)
|
no_warning(c++20-compat) # Use constinit in C++20 without warnings
|
||||||
no_warning(c++20-compat) # Use constinit in C++20 without warnings
|
no_warning(sign-conversion)
|
||||||
no_warning(sign-conversion)
|
no_warning(implicit-int-conversion)
|
||||||
no_warning(implicit-int-conversion)
|
no_warning(implicit-int-float-conversion)
|
||||||
no_warning(implicit-int-float-conversion)
|
no_warning(ctad-maybe-unsupported) # clang 9+, linux-only
|
||||||
no_warning(ctad-maybe-unsupported) # clang 9+, linux-only
|
no_warning(disabled-macro-expansion)
|
||||||
no_warning(disabled-macro-expansion)
|
no_warning(documentation-unknown-command)
|
||||||
no_warning(documentation-unknown-command)
|
no_warning(double-promotion)
|
||||||
no_warning(double-promotion)
|
no_warning(exit-time-destructors)
|
||||||
no_warning(exit-time-destructors)
|
no_warning(float-equal)
|
||||||
no_warning(float-equal)
|
no_warning(global-constructors)
|
||||||
no_warning(global-constructors)
|
no_warning(missing-prototypes)
|
||||||
no_warning(missing-prototypes)
|
no_warning(missing-variable-declarations)
|
||||||
no_warning(missing-variable-declarations)
|
no_warning(padded)
|
||||||
no_warning(padded)
|
no_warning(switch-enum)
|
||||||
no_warning(switch-enum)
|
no_warning(undefined-func-template)
|
||||||
no_warning(undefined-func-template)
|
no_warning(unused-template)
|
||||||
no_warning(unused-template)
|
no_warning(vla)
|
||||||
no_warning(vla)
|
no_warning(weak-template-vtables)
|
||||||
no_warning(weak-template-vtables)
|
no_warning(weak-vtables)
|
||||||
no_warning(weak-vtables)
|
no_warning(thread-safety-negative) # experimental flag, too many false positives
|
||||||
no_warning(thread-safety-negative) # experimental flag, too many false positives
|
no_warning(enum-constexpr-conversion) # breaks magic-enum library in clang-16
|
||||||
no_warning(enum-constexpr-conversion) # breaks magic-enum library in clang-16
|
no_warning(unsafe-buffer-usage) # too aggressive
|
||||||
no_warning(unsafe-buffer-usage) # too aggressive
|
no_warning(switch-default) # conflicts with "defaults in a switch covering all enum values"
|
||||||
no_warning(switch-default) # conflicts with "defaults in a switch covering all enum values"
|
# TODO Enable conversion, sign-conversion, double-promotion warnings.
|
||||||
# TODO Enable conversion, sign-conversion, double-promotion warnings.
|
|
||||||
endif ()
|
|
||||||
|
@ -52,7 +52,7 @@ function(absl_cc_library)
|
|||||||
)
|
)
|
||||||
|
|
||||||
target_include_directories(${_NAME}
|
target_include_directories(${_NAME}
|
||||||
PUBLIC "${ABSL_COMMON_INCLUDE_DIRS}")
|
SYSTEM PUBLIC "${ABSL_COMMON_INCLUDE_DIRS}")
|
||||||
target_compile_options(${_NAME}
|
target_compile_options(${_NAME}
|
||||||
PRIVATE ${ABSL_CC_LIB_COPTS})
|
PRIVATE ${ABSL_CC_LIB_COPTS})
|
||||||
target_compile_definitions(${_NAME} PUBLIC ${ABSL_CC_LIB_DEFINES})
|
target_compile_definitions(${_NAME} PUBLIC ${ABSL_CC_LIB_DEFINES})
|
||||||
@ -61,7 +61,7 @@ function(absl_cc_library)
|
|||||||
# Generating header-only library
|
# Generating header-only library
|
||||||
add_library(${_NAME} INTERFACE)
|
add_library(${_NAME} INTERFACE)
|
||||||
target_include_directories(${_NAME}
|
target_include_directories(${_NAME}
|
||||||
INTERFACE "${ABSL_COMMON_INCLUDE_DIRS}")
|
SYSTEM INTERFACE "${ABSL_COMMON_INCLUDE_DIRS}")
|
||||||
|
|
||||||
target_link_libraries(${_NAME}
|
target_link_libraries(${_NAME}
|
||||||
INTERFACE
|
INTERFACE
|
||||||
|
@ -81,9 +81,7 @@ set (CAPNPC_SRCS
|
|||||||
add_library(_capnpc ${CAPNPC_SRCS})
|
add_library(_capnpc ${CAPNPC_SRCS})
|
||||||
target_link_libraries(_capnpc PUBLIC _capnp)
|
target_link_libraries(_capnpc PUBLIC _capnp)
|
||||||
|
|
||||||
if (COMPILER_CLANG)
|
set (CAPNP_PRIVATE_CXX_FLAGS -fno-char8_t)
|
||||||
set (CAPNP_PRIVATE_CXX_FLAGS -fno-char8_t)
|
|
||||||
endif ()
|
|
||||||
|
|
||||||
target_compile_options(_kj PRIVATE ${CAPNP_PRIVATE_CXX_FLAGS})
|
target_compile_options(_kj PRIVATE ${CAPNP_PRIVATE_CXX_FLAGS})
|
||||||
target_compile_options(_capnp PRIVATE ${CAPNP_PRIVATE_CXX_FLAGS})
|
target_compile_options(_capnp PRIVATE ${CAPNP_PRIVATE_CXX_FLAGS})
|
||||||
|
2
contrib/lz4
vendored
2
contrib/lz4
vendored
@ -1 +1 @@
|
|||||||
Subproject commit ce45a9dbdb059511a3e9576b19db3e7f1a4f172e
|
Subproject commit 145f3804ca5ef5482cda0f2a4f6a2d04ba57f965
|
@ -91,12 +91,10 @@ set(LIB_SOVERSION ${VERSION_MAJOR})
|
|||||||
|
|
||||||
enable_language(ASM)
|
enable_language(ASM)
|
||||||
|
|
||||||
if(COMPILER_CLANG)
|
add_definitions(-Wno-unused-command-line-argument)
|
||||||
add_definitions(-Wno-unused-command-line-argument)
|
# Note that s390x build uses mold linker
|
||||||
# Note that s390x build uses mold linker
|
if(NOT ARCH_S390X)
|
||||||
if(NOT ARCH_S390X)
|
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld") # only relevant for -DENABLE_OPENSSL_DYNAMIC=1
|
||||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld") # only relevant for -DENABLE_OPENSSL_DYNAMIC=1
|
|
||||||
endif()
|
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(ARCH_AMD64)
|
if(ARCH_AMD64)
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
if (NOT OS_FREEBSD AND NOT (OS_DARWIN AND COMPILER_CLANG))
|
if (NOT OS_FREEBSD AND NOT OS_DARWIN)
|
||||||
option (ENABLE_SENTRY "Enable Sentry" ${ENABLE_LIBRARIES})
|
option (ENABLE_SENTRY "Enable Sentry" ${ENABLE_LIBRARIES})
|
||||||
else()
|
else()
|
||||||
option (ENABLE_SENTRY "Enable Sentry" OFF)
|
option (ENABLE_SENTRY "Enable Sentry" OFF)
|
||||||
|
2
contrib/yaml-cpp
vendored
2
contrib/yaml-cpp
vendored
@ -1 +1 @@
|
|||||||
Subproject commit 0c86adac6d117ee2b4afcedb8ade19036ca0327d
|
Subproject commit f91e938341273b5f9d341380ab17bcc3de5daa06
|
@ -3,10 +3,10 @@ compilers and build settings. Correctly configured Docker daemon is single depen
|
|||||||
|
|
||||||
Usage:
|
Usage:
|
||||||
|
|
||||||
Build deb package with `clang-17` in `debug` mode:
|
Build deb package with `clang-18` in `debug` mode:
|
||||||
```
|
```
|
||||||
$ mkdir deb/test_output
|
$ mkdir deb/test_output
|
||||||
$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=clang-17 --debug-build
|
$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=clang-18 --debug-build
|
||||||
$ ls -l deb/test_output
|
$ ls -l deb/test_output
|
||||||
-rw-r--r-- 1 root root 3730 clickhouse-client_22.2.2+debug_all.deb
|
-rw-r--r-- 1 root root 3730 clickhouse-client_22.2.2+debug_all.deb
|
||||||
-rw-r--r-- 1 root root 84221888 clickhouse-common-static_22.2.2+debug_amd64.deb
|
-rw-r--r-- 1 root root 84221888 clickhouse-common-static_22.2.2+debug_amd64.deb
|
||||||
@ -17,11 +17,11 @@ $ ls -l deb/test_output
|
|||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Build ClickHouse binary with `clang-17` and `address` sanitizer in `relwithdebuginfo`
|
Build ClickHouse binary with `clang-18` and `address` sanitizer in `relwithdebuginfo`
|
||||||
mode:
|
mode:
|
||||||
```
|
```
|
||||||
$ mkdir $HOME/some_clickhouse
|
$ mkdir $HOME/some_clickhouse
|
||||||
$ ./packager --output-dir=$HOME/some_clickhouse --package-type binary --compiler=clang-17 --sanitizer=address
|
$ ./packager --output-dir=$HOME/some_clickhouse --package-type binary --compiler=clang-18 --sanitizer=address
|
||||||
$ ls -l $HOME/some_clickhouse
|
$ ls -l $HOME/some_clickhouse
|
||||||
-rwxr-xr-x 1 root root 787061952 clickhouse
|
-rwxr-xr-x 1 root root 787061952 clickhouse
|
||||||
lrwxrwxrwx 1 root root 10 clickhouse-benchmark -> clickhouse
|
lrwxrwxrwx 1 root root 10 clickhouse-benchmark -> clickhouse
|
||||||
|
@ -403,19 +403,19 @@ def parse_args() -> argparse.Namespace:
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--compiler",
|
"--compiler",
|
||||||
choices=(
|
choices=(
|
||||||
"clang-17",
|
"clang-18",
|
||||||
"clang-17-darwin",
|
"clang-18-darwin",
|
||||||
"clang-17-darwin-aarch64",
|
"clang-18-darwin-aarch64",
|
||||||
"clang-17-aarch64",
|
"clang-18-aarch64",
|
||||||
"clang-17-aarch64-v80compat",
|
"clang-18-aarch64-v80compat",
|
||||||
"clang-17-ppc64le",
|
"clang-18-ppc64le",
|
||||||
"clang-17-riscv64",
|
"clang-18-riscv64",
|
||||||
"clang-17-s390x",
|
"clang-18-s390x",
|
||||||
"clang-17-amd64-compat",
|
"clang-18-amd64-compat",
|
||||||
"clang-17-amd64-musl",
|
"clang-18-amd64-musl",
|
||||||
"clang-17-freebsd",
|
"clang-18-freebsd",
|
||||||
),
|
),
|
||||||
default="clang-17",
|
default="clang-18",
|
||||||
help="a compiler to use",
|
help="a compiler to use",
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
|
@ -1,11 +1,14 @@
|
|||||||
FROM ubuntu:20.04
|
FROM ubuntu:20.04
|
||||||
|
|
||||||
# see https://github.com/moby/moby/issues/4032#issuecomment-192327844
|
# see https://github.com/moby/moby/issues/4032#issuecomment-192327844
|
||||||
|
# It could be removed after we move on a version 23:04+
|
||||||
ARG DEBIAN_FRONTEND=noninteractive
|
ARG DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
# ARG for quick switch to a given ubuntu mirror
|
# ARG for quick switch to a given ubuntu mirror
|
||||||
ARG apt_archive="http://archive.ubuntu.com"
|
ARG apt_archive="http://archive.ubuntu.com"
|
||||||
|
|
||||||
|
# We shouldn't use `apt upgrade` to not change the upstream image. It's updated biweekly
|
||||||
|
|
||||||
# user/group precreated explicitly with fixed uid/gid on purpose.
|
# user/group precreated explicitly with fixed uid/gid on purpose.
|
||||||
# It is especially important for rootless containers: in that case entrypoint
|
# It is especially important for rootless containers: in that case entrypoint
|
||||||
# can't do chown and owners of mounted volumes should be configured externally.
|
# can't do chown and owners of mounted volumes should be configured externally.
|
||||||
@ -16,13 +19,11 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
|
|||||||
&& groupadd -r clickhouse --gid=101 \
|
&& groupadd -r clickhouse --gid=101 \
|
||||||
&& useradd -r -g clickhouse --uid=101 --home-dir=/var/lib/clickhouse --shell=/bin/bash clickhouse \
|
&& useradd -r -g clickhouse --uid=101 --home-dir=/var/lib/clickhouse --shell=/bin/bash clickhouse \
|
||||||
&& apt-get update \
|
&& apt-get update \
|
||||||
&& apt-get upgrade -yq \
|
|
||||||
&& apt-get install --yes --no-install-recommends \
|
&& apt-get install --yes --no-install-recommends \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
locales \
|
locales \
|
||||||
tzdata \
|
tzdata \
|
||||||
wget \
|
wget \
|
||||||
&& apt-get clean \
|
|
||||||
&& rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/*
|
&& rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/*
|
||||||
|
|
||||||
ARG REPO_CHANNEL="stable"
|
ARG REPO_CHANNEL="stable"
|
||||||
@ -30,6 +31,9 @@ ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https
|
|||||||
ARG VERSION="24.4.1.2088"
|
ARG VERSION="24.4.1.2088"
|
||||||
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
||||||
|
|
||||||
|
#docker-official-library:off
|
||||||
|
# The part between `docker-official-library` tags is related to our builds
|
||||||
|
|
||||||
# set non-empty deb_location_url url to create a docker image
|
# set non-empty deb_location_url url to create a docker image
|
||||||
# from debs created by CI build, for example:
|
# from debs created by CI build, for example:
|
||||||
# docker build . --network host --build-arg version="21.4.1.6282" --build-arg deb_location_url="https://..." -t ...
|
# docker build . --network host --build-arg version="21.4.1.6282" --build-arg deb_location_url="https://..." -t ...
|
||||||
@ -80,19 +84,22 @@ RUN if [ -n "${single_binary_location_url}" ]; then \
|
|||||||
&& rm -rf /tmp/* ; \
|
&& rm -rf /tmp/* ; \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# The rest is the same in the official docker and in our build system
|
||||||
|
#docker-official-library:on
|
||||||
|
|
||||||
# A fallback to installation from ClickHouse repository
|
# A fallback to installation from ClickHouse repository
|
||||||
RUN if ! clickhouse local -q "SELECT ''" > /dev/null 2>&1; then \
|
RUN if ! clickhouse local -q "SELECT ''" > /dev/null 2>&1; then \
|
||||||
apt-get update \
|
apt-get update \
|
||||||
&& apt-get install --yes --no-install-recommends \
|
&& apt-get install --yes --no-install-recommends \
|
||||||
apt-transport-https \
|
apt-transport-https \
|
||||||
ca-certificates \
|
|
||||||
dirmngr \
|
dirmngr \
|
||||||
gnupg2 \
|
gnupg2 \
|
||||||
&& mkdir -p /etc/apt/sources.list.d \
|
&& mkdir -p /etc/apt/sources.list.d \
|
||||||
&& GNUPGHOME=$(mktemp -d) \
|
&& GNUPGHOME=$(mktemp -d) \
|
||||||
&& GNUPGHOME="$GNUPGHOME" gpg --no-default-keyring \
|
&& GNUPGHOME="$GNUPGHOME" gpg --batch --no-default-keyring \
|
||||||
--keyring /usr/share/keyrings/clickhouse-keyring.gpg \
|
--keyring /usr/share/keyrings/clickhouse-keyring.gpg \
|
||||||
--keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 8919F6BD2B48D754 \
|
--keyserver hkp://keyserver.ubuntu.com:80 \
|
||||||
|
--recv-keys 3a9ea1193a97b548be1457d48919f6bd2b48d754 \
|
||||||
&& rm -rf "$GNUPGHOME" \
|
&& rm -rf "$GNUPGHOME" \
|
||||||
&& chmod +r /usr/share/keyrings/clickhouse-keyring.gpg \
|
&& chmod +r /usr/share/keyrings/clickhouse-keyring.gpg \
|
||||||
&& echo "${REPOSITORY}" > /etc/apt/sources.list.d/clickhouse.list \
|
&& echo "${REPOSITORY}" > /etc/apt/sources.list.d/clickhouse.list \
|
||||||
@ -127,7 +134,6 @@ RUN mkdir /docker-entrypoint-initdb.d
|
|||||||
|
|
||||||
COPY docker_related_config.xml /etc/clickhouse-server/config.d/
|
COPY docker_related_config.xml /etc/clickhouse-server/config.d/
|
||||||
COPY entrypoint.sh /entrypoint.sh
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
RUN chmod +x /entrypoint.sh
|
|
||||||
|
|
||||||
EXPOSE 9000 8123 9009
|
EXPOSE 9000 8123 9009
|
||||||
VOLUME /var/lib/clickhouse
|
VOLUME /var/lib/clickhouse
|
||||||
|
@ -4,33 +4,34 @@
|
|||||||
|
|
||||||
ClickHouse is an open-source column-oriented DBMS (columnar database management system) for online analytical processing (OLAP) that allows users to generate analytical reports using SQL queries in real-time.
|
ClickHouse is an open-source column-oriented DBMS (columnar database management system) for online analytical processing (OLAP) that allows users to generate analytical reports using SQL queries in real-time.
|
||||||
|
|
||||||
ClickHouse works 100-1000x faster than traditional database management systems, and processes hundreds of millions to over a billion rows and tens of gigabytes of data per server per second. With a widespread user base around the globe, the technology has received praise for its reliability, ease of use, and fault tolerance.
|
ClickHouse works 100-1000x faster than traditional database management systems, and processes hundreds of millions to over a billion rows and tens of gigabytes of data per server per second. With a widespread user base around the globe, the technology has received praise for its reliability, ease of use, and fault tolerance.
|
||||||
|
|
||||||
For more information and documentation see https://clickhouse.com/.
|
For more information and documentation see https://clickhouse.com/.
|
||||||
|
|
||||||
## Versions
|
## Versions
|
||||||
|
|
||||||
- The `latest` tag points to the latest release of the latest stable branch.
|
- The `latest` tag points to the latest release of the latest stable branch.
|
||||||
- Branch tags like `22.2` point to the latest release of the corresponding branch.
|
- Branch tags like `22.2` point to the latest release of the corresponding branch.
|
||||||
- Full version tags like `22.2.3.5` point to the corresponding release.
|
- Full version tags like `22.2.3.5` point to the corresponding release.
|
||||||
- The tag `head` is built from the latest commit to the default branch.
|
- The tag `head` is built from the latest commit to the default branch.
|
||||||
- Each tag has optional `-alpine` suffix to reflect that it's built on top of `alpine`.
|
- Each tag has optional `-alpine` suffix to reflect that it's built on top of `alpine`.
|
||||||
|
|
||||||
### Compatibility
|
### Compatibility
|
||||||
|
|
||||||
- The amd64 image requires support for [SSE3 instructions](https://en.wikipedia.org/wiki/SSE3). Virtually all x86 CPUs after 2005 support SSE3.
|
- The amd64 image requires support for [SSE3 instructions](https://en.wikipedia.org/wiki/SSE3). Virtually all x86 CPUs after 2005 support SSE3.
|
||||||
- The arm64 image requires support for the [ARMv8.2-A architecture](https://en.wikipedia.org/wiki/AArch64#ARMv8.2-A) and additionally the Load-Acquire RCpc register. The register is optional in version ARMv8.2-A and mandatory in [ARMv8.3-A](https://en.wikipedia.org/wiki/AArch64#ARMv8.3-A). Supported in Graviton >=2, Azure and GCP instances. Examples for unsupported devices are Raspberry Pi 4 (ARMv8.0-A) and Jetson AGX Xavier/Orin (ARMv8.2-A).
|
- The arm64 image requires support for the [ARMv8.2-A architecture](https://en.wikipedia.org/wiki/AArch64#ARMv8.2-A) and additionally the Load-Acquire RCpc register. The register is optional in version ARMv8.2-A and mandatory in [ARMv8.3-A](https://en.wikipedia.org/wiki/AArch64#ARMv8.3-A). Supported in Graviton >=2, Azure and GCP instances. Examples for unsupported devices are Raspberry Pi 4 (ARMv8.0-A) and Jetson AGX Xavier/Orin (ARMv8.2-A).
|
||||||
|
|
||||||
## How to use this image
|
## How to use this image
|
||||||
|
|
||||||
### start server instance
|
### start server instance
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run -d --name some-clickhouse-server --ulimit nofile=262144:262144 clickhouse/clickhouse-server
|
docker run -d --name some-clickhouse-server --ulimit nofile=262144:262144 clickhouse/clickhouse-server
|
||||||
```
|
```
|
||||||
|
|
||||||
By default, ClickHouse will be accessible only via the Docker network. See the [networking section below](#networking).
|
By default, ClickHouse will be accessible only via the Docker network. See the [networking section below](#networking).
|
||||||
|
|
||||||
By default, starting above server instance will be run as the `default` user without password.
|
By default, starting above server instance will be run as the `default` user without password.
|
||||||
|
|
||||||
### connect to it from a native client
|
### connect to it from a native client
|
||||||
|
|
||||||
@ -66,9 +67,7 @@ docker run -d -p 18123:8123 -p19000:9000 --name some-clickhouse-server --ulimit
|
|||||||
echo 'SELECT version()' | curl 'http://localhost:18123/' --data-binary @-
|
echo 'SELECT version()' | curl 'http://localhost:18123/' --data-binary @-
|
||||||
```
|
```
|
||||||
|
|
||||||
```
|
`22.6.3.35`
|
||||||
22.6.3.35
|
|
||||||
```
|
|
||||||
|
|
||||||
or by allowing the container to use [host ports directly](https://docs.docker.com/network/host/) using `--network=host` (also allows achieving better network performance):
|
or by allowing the container to use [host ports directly](https://docs.docker.com/network/host/) using `--network=host` (also allows achieving better network performance):
|
||||||
|
|
||||||
@ -77,16 +76,14 @@ docker run -d --network=host --name some-clickhouse-server --ulimit nofile=26214
|
|||||||
echo 'SELECT version()' | curl 'http://localhost:8123/' --data-binary @-
|
echo 'SELECT version()' | curl 'http://localhost:8123/' --data-binary @-
|
||||||
```
|
```
|
||||||
|
|
||||||
```
|
`22.6.3.35`
|
||||||
22.6.3.35
|
|
||||||
```
|
|
||||||
|
|
||||||
### Volumes
|
### Volumes
|
||||||
|
|
||||||
Typically you may want to mount the following folders inside your container to achieve persistency:
|
Typically you may want to mount the following folders inside your container to achieve persistency:
|
||||||
|
|
||||||
* `/var/lib/clickhouse/` - main folder where ClickHouse stores the data
|
- `/var/lib/clickhouse/` - main folder where ClickHouse stores the data
|
||||||
* `/var/log/clickhouse-server/` - logs
|
- `/var/log/clickhouse-server/` - logs
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run -d \
|
docker run -d \
|
||||||
@ -97,9 +94,9 @@ docker run -d \
|
|||||||
|
|
||||||
You may also want to mount:
|
You may also want to mount:
|
||||||
|
|
||||||
* `/etc/clickhouse-server/config.d/*.xml` - files with server configuration adjustments
|
- `/etc/clickhouse-server/config.d/*.xml` - files with server configuration adjustments
|
||||||
* `/etc/clickhouse-server/users.d/*.xml` - files with user settings adjustments
|
- `/etc/clickhouse-server/users.d/*.xml` - files with user settings adjustments
|
||||||
* `/docker-entrypoint-initdb.d/` - folder with database initialization scripts (see below).
|
- `/docker-entrypoint-initdb.d/` - folder with database initialization scripts (see below).
|
||||||
|
|
||||||
### Linux capabilities
|
### Linux capabilities
|
||||||
|
|
||||||
|
@ -17,7 +17,7 @@ stage=${stage:-}
|
|||||||
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||||
echo "$script_dir"
|
echo "$script_dir"
|
||||||
repo_dir=ch
|
repo_dir=ch
|
||||||
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-17_debug_none_unsplitted_disable_False_binary"}
|
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-18_debug_none_unsplitted_disable_False_binary"}
|
||||||
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
||||||
|
|
||||||
function git_clone_with_retry
|
function git_clone_with_retry
|
||||||
|
@ -101,7 +101,8 @@ RUN python3 -m pip install --no-cache-dir \
|
|||||||
retry==0.9.2 \
|
retry==0.9.2 \
|
||||||
bs4==0.0.2 \
|
bs4==0.0.2 \
|
||||||
lxml==5.1.0 \
|
lxml==5.1.0 \
|
||||||
urllib3==2.0.7
|
urllib3==2.0.7 \
|
||||||
|
jwcrypto==1.5.6
|
||||||
# bs4, lxml are for cloud tests, do not delete
|
# bs4, lxml are for cloud tests, do not delete
|
||||||
|
|
||||||
# Hudi supports only spark 3.3.*, not 3.4
|
# Hudi supports only spark 3.3.*, not 3.4
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
|
||||||
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-17_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-18_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
||||||
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
||||||
|
|
||||||
|
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
|
||||||
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-17_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-18_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
|
||||||
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}
|
||||||
|
|
||||||
|
|
||||||
|
@ -6,7 +6,7 @@ set -e
|
|||||||
set -u
|
set -u
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
|
|
||||||
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-17_debug_none_unsplitted_disable_False_binary"}
|
BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-18_debug_none_unsplitted_disable_False_binary"}
|
||||||
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
|
||||||
|
|
||||||
function wget_with_retry
|
function wget_with_retry
|
||||||
|
@ -5,7 +5,7 @@ FROM ubuntu:22.04
|
|||||||
ARG apt_archive="http://archive.ubuntu.com"
|
ARG apt_archive="http://archive.ubuntu.com"
|
||||||
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
|
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
|
||||||
|
|
||||||
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=17
|
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=18
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install \
|
&& apt-get install \
|
||||||
|
@ -13,14 +13,14 @@ The cross-build for macOS is based on the [Build instructions](../development/bu
|
|||||||
|
|
||||||
The following sections provide a walk-through for building ClickHouse for `x86_64` macOS. If you’re targeting ARM architecture, simply substitute all occurrences of `x86_64` with `aarch64`. For example, replace `x86_64-apple-darwin` with `aarch64-apple-darwin` throughout the steps.
|
The following sections provide a walk-through for building ClickHouse for `x86_64` macOS. If you’re targeting ARM architecture, simply substitute all occurrences of `x86_64` with `aarch64`. For example, replace `x86_64-apple-darwin` with `aarch64-apple-darwin` throughout the steps.
|
||||||
|
|
||||||
## Install Clang-17
|
## Install clang-18
|
||||||
|
|
||||||
Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup.
|
Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup.
|
||||||
For example the commands for Bionic are like:
|
For example the commands for Bionic are like:
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-17 main" >> /etc/apt/sources.list
|
sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-17 main" >> /etc/apt/sources.list
|
||||||
sudo apt-get install clang-17
|
sudo apt-get install clang-18
|
||||||
```
|
```
|
||||||
|
|
||||||
## Install Cross-Compilation Toolset {#install-cross-compilation-toolset}
|
## Install Cross-Compilation Toolset {#install-cross-compilation-toolset}
|
||||||
@ -59,7 +59,7 @@ curl -L 'https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11
|
|||||||
cd ClickHouse
|
cd ClickHouse
|
||||||
mkdir build-darwin
|
mkdir build-darwin
|
||||||
cd build-darwin
|
cd build-darwin
|
||||||
CC=clang-17 CXX=clang++-17 cmake -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ar -DCMAKE_INSTALL_NAME_TOOL=${CCTOOLS}/bin/x86_64-apple-darwin-install_name_tool -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ranlib -DLINKER_NAME=${CCTOOLS}/bin/x86_64-apple-darwin-ld -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake ..
|
CC=clang-18 CXX=clang++-18 cmake -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ar -DCMAKE_INSTALL_NAME_TOOL=${CCTOOLS}/bin/x86_64-apple-darwin-install_name_tool -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ranlib -DLINKER_NAME=${CCTOOLS}/bin/x86_64-apple-darwin-ld -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake ..
|
||||||
ninja
|
ninja
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -23,7 +23,7 @@ sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
|
|||||||
``` bash
|
``` bash
|
||||||
cd ClickHouse
|
cd ClickHouse
|
||||||
mkdir build-riscv64
|
mkdir build-riscv64
|
||||||
CC=clang-17 CXX=clang++-17 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF
|
CC=clang-18 CXX=clang++-18 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF
|
||||||
ninja -C build-riscv64
|
ninja -C build-riscv64
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -109,7 +109,7 @@ The build requires the following components:
|
|||||||
|
|
||||||
- Git (used to checkout the sources, not needed for the build)
|
- Git (used to checkout the sources, not needed for the build)
|
||||||
- CMake 3.20 or newer
|
- CMake 3.20 or newer
|
||||||
- Compiler: clang-17 or newer
|
- Compiler: clang-18 or newer
|
||||||
- Linker: lld-17 or newer
|
- Linker: lld-17 or newer
|
||||||
- Ninja
|
- Ninja
|
||||||
- Yasm
|
- Yasm
|
||||||
|
@ -153,7 +153,7 @@ Builds ClickHouse in various configurations for use in further steps. You have t
|
|||||||
|
|
||||||
### Report Details
|
### Report Details
|
||||||
|
|
||||||
- **Compiler**: `clang-17`, optionally with the name of a target platform
|
- **Compiler**: `clang-18`, optionally with the name of a target platform
|
||||||
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
|
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
|
||||||
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
|
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
|
||||||
- **Status**: `success` or `fail`
|
- **Status**: `success` or `fail`
|
||||||
@ -177,7 +177,7 @@ Performs static analysis and code style checks using `clang-tidy`. The report is
|
|||||||
There is a convenience `packager` script that runs the clang-tidy build in docker
|
There is a convenience `packager` script that runs the clang-tidy build in docker
|
||||||
```sh
|
```sh
|
||||||
mkdir build_tidy
|
mkdir build_tidy
|
||||||
./docker/packager/packager --output-dir=./build_tidy --package-type=binary --compiler=clang-17 --debug-build --clang-tidy
|
./docker/packager/packager --output-dir=./build_tidy --package-type=binary --compiler=clang-18 --debug-build --clang-tidy
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -121,7 +121,7 @@ While inside the `build` directory, configure your build by running CMake. Befor
|
|||||||
export CC=clang CXX=clang++
|
export CC=clang CXX=clang++
|
||||||
cmake ..
|
cmake ..
|
||||||
|
|
||||||
If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-17 CXX=clang++-17`. The clang version will be in the script output.
|
If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-18 CXX=clang++-18`. The clang version will be in the script output.
|
||||||
|
|
||||||
The `CC` variable specifies the compiler for C (short for C Compiler), and `CXX` variable instructs which C++ compiler is to be used for building.
|
The `CC` variable specifies the compiler for C (short for C Compiler), and `CXX` variable instructs which C++ compiler is to be used for building.
|
||||||
|
|
||||||
|
@ -51,6 +51,9 @@ ENGINE = MaterializedMySQL('host:port', ['database' | database], 'user', 'passwo
|
|||||||
### allows_query_when_mysql_lost
|
### allows_query_when_mysql_lost
|
||||||
`allows_query_when_mysql_lost` — Allows to query a materialized table when MySQL is lost. Default: `0` (`false`).
|
`allows_query_when_mysql_lost` — Allows to query a materialized table when MySQL is lost. Default: `0` (`false`).
|
||||||
|
|
||||||
|
### allow_startup_database_without_connection_to_mysql
|
||||||
|
`allow_startup_database_without_connection_to_mysql` — Allow to create and attach database without available connection to MySQL. Default: `0` (`false`).
|
||||||
|
|
||||||
### materialized_mysql_tables_list
|
### materialized_mysql_tables_list
|
||||||
|
|
||||||
`materialized_mysql_tables_list` — a comma-separated list of mysql database tables, which will be replicated by MaterializedMySQL database engine. Default value: empty list — means whole tables will be replicated.
|
`materialized_mysql_tables_list` — a comma-separated list of mysql database tables, which will be replicated by MaterializedMySQL database engine. Default value: empty list — means whole tables will be replicated.
|
||||||
|
@ -10,7 +10,7 @@ sidebar_label: Data Replication
|
|||||||
In ClickHouse Cloud replication is managed for you. Please create your tables without adding arguments. For example, in the text below you would replace:
|
In ClickHouse Cloud replication is managed for you. Please create your tables without adding arguments. For example, in the text below you would replace:
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
ENGINE = ReplicatedReplacingMergeTree(
|
ENGINE = ReplicatedMergeTree(
|
||||||
'/clickhouse/tables/{shard}/table_name',
|
'/clickhouse/tables/{shard}/table_name',
|
||||||
'{replica}',
|
'{replica}',
|
||||||
ver
|
ver
|
||||||
@ -20,7 +20,7 @@ ENGINE = ReplicatedReplacingMergeTree(
|
|||||||
with:
|
with:
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
ENGINE = ReplicatedReplacingMergeTree
|
ENGINE = ReplicatedMergeTree
|
||||||
```
|
```
|
||||||
:::
|
:::
|
||||||
|
|
||||||
@ -140,11 +140,11 @@ The system monitors data synchronicity on replicas and is able to recover after
|
|||||||
:::note
|
:::note
|
||||||
In ClickHouse Cloud replication is managed for you. Please create your tables without adding arguments. For example, in the text below you would replace:
|
In ClickHouse Cloud replication is managed for you. Please create your tables without adding arguments. For example, in the text below you would replace:
|
||||||
```
|
```
|
||||||
ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{shard}/table_name', '{replica}', ver)
|
ENGINE = ReplicatedMergeTree('/clickhouse/tables/{shard}/table_name', '{replica}', ver)
|
||||||
```
|
```
|
||||||
with:
|
with:
|
||||||
```
|
```
|
||||||
ENGINE = ReplicatedReplacingMergeTree
|
ENGINE = ReplicatedMergeTree
|
||||||
```
|
```
|
||||||
:::
|
:::
|
||||||
|
|
||||||
@ -177,7 +177,7 @@ CREATE TABLE table_name
|
|||||||
CounterID UInt32,
|
CounterID UInt32,
|
||||||
UserID UInt32,
|
UserID UInt32,
|
||||||
ver UInt16
|
ver UInt16
|
||||||
) ENGINE = ReplicatedReplacingMergeTree('/clickhouse/tables/{shard}/table_name', '{replica}', ver)
|
) ENGINE = ReplicatedMergeTree('/clickhouse/tables/{shard}/table_name', '{replica}', ver)
|
||||||
PARTITION BY toYYYYMM(EventDate)
|
PARTITION BY toYYYYMM(EventDate)
|
||||||
ORDER BY (CounterID, EventDate, intHash32(UserID))
|
ORDER BY (CounterID, EventDate, intHash32(UserID))
|
||||||
SAMPLE BY intHash32(UserID);
|
SAMPLE BY intHash32(UserID);
|
||||||
|
@ -75,7 +75,7 @@ The supported formats are:
|
|||||||
| [ArrowStream](#data-format-arrow-stream) | ✔ | ✔ |
|
| [ArrowStream](#data-format-arrow-stream) | ✔ | ✔ |
|
||||||
| [ORC](#data-format-orc) | ✔ | ✔ |
|
| [ORC](#data-format-orc) | ✔ | ✔ |
|
||||||
| [One](#data-format-one) | ✔ | ✗ |
|
| [One](#data-format-one) | ✔ | ✗ |
|
||||||
| [Npy](#data-format-npy) | ✔ | ✗ |
|
| [Npy](#data-format-npy) | ✔ | ✔ |
|
||||||
| [RowBinary](#rowbinary) | ✔ | ✔ |
|
| [RowBinary](#rowbinary) | ✔ | ✔ |
|
||||||
| [RowBinaryWithNames](#rowbinarywithnamesandtypes) | ✔ | ✔ |
|
| [RowBinaryWithNames](#rowbinarywithnamesandtypes) | ✔ | ✔ |
|
||||||
| [RowBinaryWithNamesAndTypes](#rowbinarywithnamesandtypes) | ✔ | ✔ |
|
| [RowBinaryWithNamesAndTypes](#rowbinarywithnamesandtypes) | ✔ | ✔ |
|
||||||
@ -207,7 +207,7 @@ SELECT * FROM nestedt FORMAT TSV
|
|||||||
Differs from `TabSeparated` format in that the rows are written without escaping.
|
Differs from `TabSeparated` format in that the rows are written without escaping.
|
||||||
When parsing with this format, tabs or linefeeds are not allowed in each field.
|
When parsing with this format, tabs or linefeeds are not allowed in each field.
|
||||||
|
|
||||||
This format is also available under the name `TSVRaw`.
|
This format is also available under the names `TSVRaw`, `Raw`.
|
||||||
|
|
||||||
## TabSeparatedWithNames {#tabseparatedwithnames}
|
## TabSeparatedWithNames {#tabseparatedwithnames}
|
||||||
|
|
||||||
@ -242,14 +242,14 @@ This format is also available under the name `TSVWithNamesAndTypes`.
|
|||||||
Differs from `TabSeparatedWithNames` format in that the rows are written without escaping.
|
Differs from `TabSeparatedWithNames` format in that the rows are written without escaping.
|
||||||
When parsing with this format, tabs or linefeeds are not allowed in each field.
|
When parsing with this format, tabs or linefeeds are not allowed in each field.
|
||||||
|
|
||||||
This format is also available under the name `TSVRawWithNames`.
|
This format is also available under the names `TSVRawWithNames`, `RawWithNames`.
|
||||||
|
|
||||||
## TabSeparatedRawWithNamesAndTypes {#tabseparatedrawwithnamesandtypes}
|
## TabSeparatedRawWithNamesAndTypes {#tabseparatedrawwithnamesandtypes}
|
||||||
|
|
||||||
Differs from `TabSeparatedWithNamesAndTypes` format in that the rows are written without escaping.
|
Differs from `TabSeparatedWithNamesAndTypes` format in that the rows are written without escaping.
|
||||||
When parsing with this format, tabs or linefeeds are not allowed in each field.
|
When parsing with this format, tabs or linefeeds are not allowed in each field.
|
||||||
|
|
||||||
This format is also available under the name `TSVRawWithNamesAndNames`.
|
This format is also available under the names `TSVRawWithNamesAndNames`, `RawWithNamesAndNames`.
|
||||||
|
|
||||||
## Template {#format-template}
|
## Template {#format-template}
|
||||||
|
|
||||||
@ -2467,22 +2467,21 @@ Result:
|
|||||||
## Npy {#data-format-npy}
|
## Npy {#data-format-npy}
|
||||||
|
|
||||||
This function is designed to load a NumPy array from a .npy file into ClickHouse. The NumPy file format is a binary format used for efficiently storing arrays of numerical data. During import, ClickHouse treats top level dimension as an array of rows with single column. Supported Npy data types and their corresponding type in ClickHouse:
|
This function is designed to load a NumPy array from a .npy file into ClickHouse. The NumPy file format is a binary format used for efficiently storing arrays of numerical data. During import, ClickHouse treats top level dimension as an array of rows with single column. Supported Npy data types and their corresponding type in ClickHouse:
|
||||||
| Npy type | ClickHouse type |
|
|
||||||
|:--------:|:---------------:|
|
| Npy data type (`INSERT`) | ClickHouse data type | Npy data type (`SELECT`) |
|
||||||
| b1 | UInt8 |
|
|--------------------------|-----------------------------------------------------------------|--------------------------|
|
||||||
| i1 | Int8 |
|
| `i1` | [Int8](/docs/en/sql-reference/data-types/int-uint.md) | `i1` |
|
||||||
| i2 | Int16 |
|
| `i2` | [Int16](/docs/en/sql-reference/data-types/int-uint.md) | `i2` |
|
||||||
| i4 | Int32 |
|
| `i4` | [Int32](/docs/en/sql-reference/data-types/int-uint.md) | `i4` |
|
||||||
| i8 | Int64 |
|
| `i8` | [Int64](/docs/en/sql-reference/data-types/int-uint.md) | `i8` |
|
||||||
| u1 | UInt8 |
|
| `u1`, `b1` | [UInt8](/docs/en/sql-reference/data-types/int-uint.md) | `u1` |
|
||||||
| u2 | UInt16 |
|
| `u2` | [UInt16](/docs/en/sql-reference/data-types/int-uint.md) | `u2` |
|
||||||
| u4 | UInt32 |
|
| `u4` | [UInt32](/docs/en/sql-reference/data-types/int-uint.md) | `u4` |
|
||||||
| u8 | UInt64 |
|
| `u8` | [UInt64](/docs/en/sql-reference/data-types/int-uint.md) | `u8` |
|
||||||
| f2 | Float32 |
|
| `f2`, `f4` | [Float32](/docs/en/sql-reference/data-types/float.md) | `f4` |
|
||||||
| f4 | Float32 |
|
| `f8` | [Float64](/docs/en/sql-reference/data-types/float.md) | `f8` |
|
||||||
| f8 | Float64 |
|
| `S`, `U` | [String](/docs/en/sql-reference/data-types/string.md) | `S` |
|
||||||
| S | String |
|
| | [FixedString](/docs/en/sql-reference/data-types/fixedstring.md) | `S` |
|
||||||
| U | String |
|
|
||||||
|
|
||||||
**Example of saving an array in .npy format using Python**
|
**Example of saving an array in .npy format using Python**
|
||||||
|
|
||||||
@ -2509,6 +2508,14 @@ Result:
|
|||||||
└───────────────┘
|
└───────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
**Selecting Data**
|
||||||
|
|
||||||
|
You can select data from a ClickHouse table and save it into a file in the Npy format by the following command:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ clickhouse-client --query="SELECT {column} FROM {some_table} FORMAT Npy" > {filename.npy}
|
||||||
|
```
|
||||||
|
|
||||||
## LineAsString {#lineasstring}
|
## LineAsString {#lineasstring}
|
||||||
|
|
||||||
In this format, every line of input data is interpreted as a single string value. This format can only be parsed for table with a single field of type [String](/docs/en/sql-reference/data-types/string.md). The remaining columns must be set to [DEFAULT](/docs/en/sql-reference/statements/create/table.md/#default) or [MATERIALIZED](/docs/en/sql-reference/statements/create/table.md/#materialized), or omitted.
|
In this format, every line of input data is interpreted as a single string value. This format can only be parsed for table with a single field of type [String](/docs/en/sql-reference/data-types/string.md). The remaining columns must be set to [DEFAULT](/docs/en/sql-reference/statements/create/table.md/#default) or [MATERIALIZED](/docs/en/sql-reference/statements/create/table.md/#materialized), or omitted.
|
||||||
|
12
docs/en/interfaces/third-party/gui.md
vendored
12
docs/en/interfaces/third-party/gui.md
vendored
@ -210,6 +210,18 @@ Features:
|
|||||||
- Pre-built metrics dashboards.
|
- Pre-built metrics dashboards.
|
||||||
- Multiple users/projects via YAML config.
|
- Multiple users/projects via YAML config.
|
||||||
|
|
||||||
|
### clickhouse-monitoring {#clickhouse-monitoring}
|
||||||
|
|
||||||
|
[clickhouse-monitoring](https://github.com/duyet/clickhouse-monitoring) is a simple Next.js dashboard that relies on `system.*` tables to help monitor and provide an overview of your ClickHouse cluster.
|
||||||
|
|
||||||
|
Features:
|
||||||
|
|
||||||
|
- Query monitor: current queries, query history, query resources (memory, parts read, file_open, ...), most expensive queries, most used tables or columns, etc.
|
||||||
|
- Cluster monitor: total memory/CPU usage, distributed queue, global settings, mergetree settings, metrics, etc.
|
||||||
|
- Tables and parts information: size, row count, compression, part size, etc., at the column level detail.
|
||||||
|
- Useful tools: Zookeeper data exploration, query EXPLAIN, kill queries, etc.
|
||||||
|
- Visualization metric charts: queries and resource usage, number of merges/mutation, merge performance, query performance, etc.
|
||||||
|
|
||||||
## Commercial {#commercial}
|
## Commercial {#commercial}
|
||||||
|
|
||||||
### DataGrip {#datagrip}
|
### DataGrip {#datagrip}
|
||||||
|
@ -371,6 +371,8 @@ is equal to
|
|||||||
</s3_plain_rewritable>
|
</s3_plain_rewritable>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Starting from `24.5` it is possible configure any object storage disk (`s3`, `azure`, `local`) using `plain_rewritable` metadata type.
|
||||||
|
|
||||||
### Using Azure Blob Storage {#azure-blob-storage}
|
### Using Azure Blob Storage {#azure-blob-storage}
|
||||||
|
|
||||||
`MergeTree` family table engines can store data to [Azure Blob Storage](https://azure.microsoft.com/en-us/services/storage/blobs/) using a disk with type `azure_blob_storage`.
|
`MergeTree` family table engines can store data to [Azure Blob Storage](https://azure.microsoft.com/en-us/services/storage/blobs/) using a disk with type `azure_blob_storage`.
|
||||||
|
@ -14,7 +14,7 @@ The `system.part_log` table contains the following columns:
|
|||||||
- `event_type` ([Enum8](../../sql-reference/data-types/enum.md)) — Type of the event that occurred with the data part. Can have one of the following values:
|
- `event_type` ([Enum8](../../sql-reference/data-types/enum.md)) — Type of the event that occurred with the data part. Can have one of the following values:
|
||||||
- `NewPart` — Inserting of a new data part.
|
- `NewPart` — Inserting of a new data part.
|
||||||
- `MergeParts` — Merging of data parts.
|
- `MergeParts` — Merging of data parts.
|
||||||
- `DownloadParts` — Downloading a data part.
|
- `DownloadPart` — Downloading a data part.
|
||||||
- `RemovePart` — Removing or detaching a data part using [DETACH PARTITION](../../sql-reference/statements/alter/partition.md#alter_detach-partition).
|
- `RemovePart` — Removing or detaching a data part using [DETACH PARTITION](../../sql-reference/statements/alter/partition.md#alter_detach-partition).
|
||||||
- `MutatePart` — Mutating of a data part.
|
- `MutatePart` — Mutating of a data part.
|
||||||
- `MovePart` — Moving the data part from the one disk to another one.
|
- `MovePart` — Moving the data part from the one disk to another one.
|
||||||
|
@ -7,6 +7,7 @@ sidebar_label: Map(K, V)
|
|||||||
# Map(K, V)
|
# Map(K, V)
|
||||||
|
|
||||||
`Map(K, V)` data type stores `key:value` pairs.
|
`Map(K, V)` data type stores `key:value` pairs.
|
||||||
|
The Map datatype is implemented as `Array(Tuple(key T1, value T2))`, which means that the order of keys in each map does not change, i.e., this data type maintains insertion order.
|
||||||
|
|
||||||
**Parameters**
|
**Parameters**
|
||||||
|
|
||||||
|
@ -234,3 +234,34 @@ SELECT least(toDateTime32(now() + toIntervalDay(1)), toDateTime64(now(), 3))
|
|||||||
:::note
|
:::note
|
||||||
The type returned is a DateTime64 as the DateTime32 must be promoted to 64 bit for the comparison.
|
The type returned is a DateTime64 as the DateTime32 must be promoted to 64 bit for the comparison.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
## clamp
|
||||||
|
|
||||||
|
Constrain the return value between A and B.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
clamp(value, min, max)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Arguments**
|
||||||
|
|
||||||
|
- `value` – Input value.
|
||||||
|
- `min` – Limit the lower bound.
|
||||||
|
- `max` – Limit the upper bound.
|
||||||
|
|
||||||
|
**Returned values**
|
||||||
|
|
||||||
|
If the value is less than the minimum value, return the minimum value; if it is greater than the maximum value, return the maximum value; otherwise, return the current value.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT clamp(1, 2, 3) result, toTypeName(result) type;
|
||||||
|
```
|
||||||
|
```response
|
||||||
|
┌─result─┬─type────┐
|
||||||
|
│ 2 │ Float64 │
|
||||||
|
└────────┴─────────┘
|
||||||
|
```
|
@ -2558,13 +2558,27 @@ Like function `YYYYMMDDhhmmssToDate()` but produces a [DateTime64](../../sql-ref
|
|||||||
|
|
||||||
Accepts an additional, optional `precision` parameter after the `timezone` parameter.
|
Accepts an additional, optional `precision` parameter after the `timezone` parameter.
|
||||||
|
|
||||||
## addYears, addQuarters, addMonths, addWeeks, addDays, addHours, addMinutes, addSeconds, addMilliseconds, addMicroseconds, addNanoseconds
|
## addYears
|
||||||
|
|
||||||
These functions add units of the interval specified by the function name to a date, a date with time or a string-encoded date / date with time. A date or date with time is returned.
|
Adds a specified number of years to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
Example:
|
**Syntax**
|
||||||
|
|
||||||
``` sql
|
```sql
|
||||||
|
addYears(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of years to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of years to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` years. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
WITH
|
WITH
|
||||||
toDate('2024-01-01') AS date,
|
toDate('2024-01-01') AS date,
|
||||||
toDateTime('2024-01-01 00:00:00') AS date_time,
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
@ -2575,12 +2589,456 @@ SELECT
|
|||||||
addYears(date_time_string, 1) AS add_years_with_date_time_string
|
addYears(date_time_string, 1) AS add_years_with_date_time_string
|
||||||
```
|
```
|
||||||
|
|
||||||
``` text
|
```response
|
||||||
┌─add_years_with_date─┬─add_years_with_date_time─┬─add_years_with_date_time_string─┐
|
┌─add_years_with_date─┬─add_years_with_date_time─┬─add_years_with_date_time_string─┐
|
||||||
│ 2025-01-01 │ 2025-01-01 00:00:00 │ 2025-01-01 00:00:00.000 │
|
│ 2025-01-01 │ 2025-01-01 00:00:00 │ 2025-01-01 00:00:00.000 │
|
||||||
└─────────────────────┴──────────────────────────┴─────────────────────────────────┘
|
└─────────────────────┴──────────────────────────┴─────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## addQuarters
|
||||||
|
|
||||||
|
Adds a specified number of quarters to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addQuarters(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of quarters to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of quarters to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` quarters. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDate('2024-01-01') AS date,
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addQuarters(date, 1) AS add_quarters_with_date,
|
||||||
|
addQuarters(date_time, 1) AS add_quarters_with_date_time,
|
||||||
|
addQuarters(date_time_string, 1) AS add_quarters_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_quarters_with_date─┬─add_quarters_with_date_time─┬─add_quarters_with_date_time_string─┐
|
||||||
|
│ 2024-04-01 │ 2024-04-01 00:00:00 │ 2024-04-01 00:00:00.000 │
|
||||||
|
└────────────────────────┴─────────────────────────────┴────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addMonths
|
||||||
|
|
||||||
|
Adds a specified number of months to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addMonths(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of months to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of months to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` months. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDate('2024-01-01') AS date,
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addMonths(date, 6) AS add_months_with_date,
|
||||||
|
addMonths(date_time, 6) AS add_months_with_date_time,
|
||||||
|
addMonths(date_time_string, 6) AS add_months_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_months_with_date─┬─add_months_with_date_time─┬─add_months_with_date_time_string─┐
|
||||||
|
│ 2024-07-01 │ 2024-07-01 00:00:00 │ 2024-07-01 00:00:00.000 │
|
||||||
|
└──────────────────────┴───────────────────────────┴──────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addWeeks
|
||||||
|
|
||||||
|
Adds a specified number of weeks to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addWeeks(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of weeks to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of weeks to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` weeks. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDate('2024-01-01') AS date,
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addWeeks(date, 5) AS add_weeks_with_date,
|
||||||
|
addWeeks(date_time, 5) AS add_weeks_with_date_time,
|
||||||
|
addWeeks(date_time_string, 5) AS add_weeks_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_weeks_with_date─┬─add_weeks_with_date_time─┬─add_weeks_with_date_time_string─┐
|
||||||
|
│ 2024-02-05 │ 2024-02-05 00:00:00 │ 2024-02-05 00:00:00.000 │
|
||||||
|
└─────────────────────┴──────────────────────────┴─────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addDays
|
||||||
|
|
||||||
|
Adds a specified number of days to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addDays(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of days to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of days to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` days. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDate('2024-01-01') AS date,
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addDays(date, 5) AS add_days_with_date,
|
||||||
|
addDays(date_time, 5) AS add_days_with_date_time,
|
||||||
|
addDays(date_time_string, 5) AS add_days_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_days_with_date─┬─add_days_with_date_time─┬─add_days_with_date_time_string─┐
|
||||||
|
│ 2024-01-06 │ 2024-01-06 00:00:00 │ 2024-01-06 00:00:00.000 │
|
||||||
|
└────────────────────┴─────────────────────────┴────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addHours
|
||||||
|
|
||||||
|
Adds a specified number of hours to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addHours(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of hours to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of hours to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` hours. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDate('2024-01-01') AS date,
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addHours(date, 12) AS add_hours_with_date,
|
||||||
|
addHours(date_time, 12) AS add_hours_with_date_time,
|
||||||
|
addHours(date_time_string, 12) AS add_hours_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_hours_with_date─┬─add_hours_with_date_time─┬─add_hours_with_date_time_string─┐
|
||||||
|
│ 2024-01-01 12:00:00 │ 2024-01-01 12:00:00 │ 2024-01-01 12:00:00.000 │
|
||||||
|
└─────────────────────┴──────────────────────────┴─────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addMinutes
|
||||||
|
|
||||||
|
Adds a specified number of minutes to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addMinutes(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of minutes to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of minutes to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` minutes. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDate('2024-01-01') AS date,
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addMinutes(date, 20) AS add_minutes_with_date,
|
||||||
|
addMinutes(date_time, 20) AS add_minutes_with_date_time,
|
||||||
|
addMinutes(date_time_string, 20) AS add_minutes_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_minutes_with_date─┬─add_minutes_with_date_time─┬─add_minutes_with_date_time_string─┐
|
||||||
|
│ 2024-01-01 00:20:00 │ 2024-01-01 00:20:00 │ 2024-01-01 00:20:00.000 │
|
||||||
|
└───────────────────────┴────────────────────────────┴───────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addSeconds
|
||||||
|
|
||||||
|
Adds a specified number of seconds to a date, a date with time or a string-encoded date / date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addSeconds(date, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date / date with time to add specified number of seconds to. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of seconds to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` plus `num` seconds. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDate('2024-01-01') AS date,
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addSeconds(date, 30) AS add_seconds_with_date,
|
||||||
|
addSeconds(date_time, 30) AS add_seconds_with_date_time,
|
||||||
|
addSeconds(date_time_string, 30) AS add_seconds_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_seconds_with_date─┬─add_seconds_with_date_time─┬─add_seconds_with_date_time_string─┐
|
||||||
|
│ 2024-01-01 00:00:30 │ 2024-01-01 00:00:30 │ 2024-01-01 00:00:30.000 │
|
||||||
|
└───────────────────────┴────────────────────────────┴───────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addMilliseconds
|
||||||
|
|
||||||
|
Adds a specified number of milliseconds to a date with time or a string-encoded date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addMilliseconds(date_time, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date_time`: Date with time to add specified number of milliseconds to. [DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of milliseconds to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date_time` plus `num` milliseconds. [DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addMilliseconds(date_time, 1000) AS add_milliseconds_with_date_time,
|
||||||
|
addMilliseconds(date_time_string, 1000) AS add_milliseconds_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_milliseconds_with_date_time─┬─add_milliseconds_with_date_time_string─┐
|
||||||
|
│ 2024-01-01 00:00:01.000 │ 2024-01-01 00:00:01.000 │
|
||||||
|
└─────────────────────────────────┴────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addMicroseconds
|
||||||
|
|
||||||
|
Adds a specified number of microseconds to a date with time or a string-encoded date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addMicroseconds(date_time, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date_time`: Date with time to add specified number of microseconds to. [DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of microseconds to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date_time` plus `num` microseconds. [DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addMicroseconds(date_time, 1000000) AS add_microseconds_with_date_time,
|
||||||
|
addMicroseconds(date_time_string, 1000000) AS add_microseconds_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_microseconds_with_date_time─┬─add_microseconds_with_date_time_string─┐
|
||||||
|
│ 2024-01-01 00:00:01.000000 │ 2024-01-01 00:00:01.000000 │
|
||||||
|
└─────────────────────────────────┴────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addNanoseconds
|
||||||
|
|
||||||
|
Adds a specified number of nanoseconds to a date with time or a string-encoded date with time.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addNanoseconds(date_time, num)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date_time`: Date with time to add specified number of nanoseconds to. [DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md), [String](../data-types/string.md).
|
||||||
|
- `num`: Number of nanoseconds to add. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date_time` plus `num` nanoseconds. [DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH
|
||||||
|
toDateTime('2024-01-01 00:00:00') AS date_time,
|
||||||
|
'2024-01-01 00:00:00' AS date_time_string
|
||||||
|
SELECT
|
||||||
|
addNanoseconds(date_time, 1000) AS add_nanoseconds_with_date_time,
|
||||||
|
addNanoseconds(date_time_string, 1000) AS add_nanoseconds_with_date_time_string
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─add_nanoseconds_with_date_time─┬─add_nanoseconds_with_date_time_string─┐
|
||||||
|
│ 2024-01-01 00:00:00.000001000 │ 2024-01-01 00:00:00.000001000 │
|
||||||
|
└────────────────────────────────┴───────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addInterval
|
||||||
|
|
||||||
|
Adds an interval to another interval or tuple of intervals.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addInterval(interval_1, interval_2)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `interval_1`: First interval or tuple of intervals. [interval](../data-types/special-data-types/interval.md), [tuple](../data-types/tuple.md)([interval](../data-types/special-data-types/interval.md)).
|
||||||
|
- `interval_2`: Second interval to be added. [interval](../data-types/special-data-types/interval.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns a tuple of intervals. [tuple](../data-types/tuple.md)([interval](../data-types/special-data-types/interval.md)).
|
||||||
|
|
||||||
|
:::note
|
||||||
|
Intervals of the same type will be combined into a single interval. For instance if `toIntervalDay(1)` and `toIntervalDay(2)` are passed then the result will be `(3)` rather than `(1,2)`.
|
||||||
|
:::
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT addInterval(INTERVAL 1 DAY, INTERVAL 1 MONTH);
|
||||||
|
SELECT addInterval((INTERVAL 1 DAY, INTERVAL 1 YEAR), INTERVAL 1 MONTH);
|
||||||
|
SELECT addInterval(INTERVAL 2 DAY, INTERVAL 1 DAY);
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─addInterval(toIntervalDay(1), toIntervalMonth(1))─┐
|
||||||
|
│ (1,1) │
|
||||||
|
└───────────────────────────────────────────────────┘
|
||||||
|
┌─addInterval((toIntervalDay(1), toIntervalYear(1)), toIntervalMonth(1))─┐
|
||||||
|
│ (1,1,1) │
|
||||||
|
└────────────────────────────────────────────────────────────────────────┘
|
||||||
|
┌─addInterval(toIntervalDay(2), toIntervalDay(1))─┐
|
||||||
|
│ (3) │
|
||||||
|
└─────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## addTupleOfIntervals
|
||||||
|
|
||||||
|
Consecutively adds a tuple of intervals to a Date or a DateTime.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
addTupleOfIntervals(date, intervals)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `date`: Date or date with time to add the `intervals` to. [date](../data-types/date.md)/[date32](../data-types/date32.md)/[datetime](../data-types/datetime.md)/[datetime64](../data-types/datetime64.md).
|
||||||
|
- `intervals`: Tuple of intervals to add to `date`. [tuple](../data-types/tuple.md)([interval](../data-types/special-data-types/interval.md)).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
- Returns `date` with added `intervals`. [date](../data-types/date.md)/[date32](../data-types/date32.md)/[datetime](../data-types/datetime.md)/[datetime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH toDate('2018-01-01') AS date
|
||||||
|
SELECT addTupleOfIntervals(date, (INTERVAL 1 DAY, INTERVAL 1 MONTH, INTERVAL 1 YEAR))
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─addTupleOfIntervals(date, (toIntervalDay(1), toIntervalMonth(1), toIntervalYear(1)))─┐
|
||||||
|
│ 2019-02-02 │
|
||||||
|
└──────────────────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
## subtractYears
|
## subtractYears
|
||||||
|
|
||||||
Subtracts a specified number of years from a date, a date with time or a string-encoded date / date with time.
|
Subtracts a specified number of years from a date, a date with time or a string-encoded date / date with time.
|
||||||
@ -2893,7 +3351,7 @@ subtractMilliseconds(date_time, num)
|
|||||||
- `num`: Number of milliseconds to subtract. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
- `num`: Number of milliseconds to subtract. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
**Returned value**
|
**Returned value**
|
||||||
- Returns `date_time` minus `num` milliseconds. [Date](../data-types/date.md)/[Date32](../data-types/date32.md)/[DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
- Returns `date_time` minus `num` milliseconds. [DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
**Example**
|
**Example**
|
||||||
|
|
||||||
@ -2928,7 +3386,7 @@ subtractMicroseconds(date_time, num)
|
|||||||
- `num`: Number of microseconds to subtract. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
- `num`: Number of microseconds to subtract. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
**Returned value**
|
**Returned value**
|
||||||
- Returns `date_time` minus `num` microseconds. [DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
- Returns `date_time` minus `num` microseconds. [DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
**Example**
|
**Example**
|
||||||
|
|
||||||
@ -2963,7 +3421,7 @@ subtractNanoseconds(date_time, num)
|
|||||||
- `num`: Number of nanoseconds to subtract. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
- `num`: Number of nanoseconds to subtract. [(U)Int*](../data-types/int-uint.md), [Float*](../data-types/float.md).
|
||||||
|
|
||||||
**Returned value**
|
**Returned value**
|
||||||
- Returns `date_time` minus `num` nanoseconds. [DateTime](../data-types/datetime.md)/[DateTime64](../data-types/datetime64.md).
|
- Returns `date_time` minus `num` nanoseconds. [DateTime64](../data-types/datetime64.md).
|
||||||
|
|
||||||
**Example**
|
**Example**
|
||||||
|
|
||||||
@ -3001,7 +3459,7 @@ subtractInterval(interval_1, interval_2)
|
|||||||
- Returns a tuple of intervals. [tuple](../data-types/tuple.md)([interval](../data-types/special-data-types/interval.md)).
|
- Returns a tuple of intervals. [tuple](../data-types/tuple.md)([interval](../data-types/special-data-types/interval.md)).
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
If the types of the first interval (or the interval in the tuple) and the second interval are the same they will be merged into one interval.
|
Intervals of the same type will be combined into a single interval. For instance if `toIntervalDay(2)` and `toIntervalDay(1)` are passed then the result will be `(1)` rather than `(2,1)`
|
||||||
:::
|
:::
|
||||||
|
|
||||||
**Example**
|
**Example**
|
||||||
|
@ -12,6 +12,8 @@ Returns whether the argument is [NULL](../../sql-reference/syntax.md#null).
|
|||||||
|
|
||||||
See also operator [`IS NULL`](../operators/index.md#is_null).
|
See also operator [`IS NULL`](../operators/index.md#is_null).
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
``` sql
|
``` sql
|
||||||
isNull(x)
|
isNull(x)
|
||||||
```
|
```
|
||||||
@ -52,6 +54,45 @@ Result:
|
|||||||
└───┘
|
└───┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## isNullable
|
||||||
|
|
||||||
|
Returns `1` if a column is [Nullable](../data-types/nullable.md) (i.e allows `NULL` values), `0` otherwise.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
isNullable(x)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Arguments**
|
||||||
|
|
||||||
|
- `x` — column.
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
- `1` if `x` allows `NULL` values. [UInt8](../data-types/int-uint.md).
|
||||||
|
- `0` if `x` does not allow `NULL` values. [UInt8](../data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
CREATE TABLE tab (ordinary_col UInt32, nullable_col Nullable(UInt32)) ENGINE = Log;
|
||||||
|
INSERT INTO tab (ordinary_col, nullable_col) VALUES (1,1), (2, 2), (3,3);
|
||||||
|
SELECT isNullable(ordinary_col), isNullable(nullable_col) FROM tab;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌───isNullable(ordinary_col)──┬───isNullable(nullable_col)──┐
|
||||||
|
1. │ 0 │ 1 │
|
||||||
|
2. │ 0 │ 1 │
|
||||||
|
3. │ 0 │ 1 │
|
||||||
|
└─────────────────────────────┴─────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
## isNotNull
|
## isNotNull
|
||||||
|
|
||||||
Returns whether the argument is not [NULL](../../sql-reference/syntax.md#null-literal).
|
Returns whether the argument is not [NULL](../../sql-reference/syntax.md#null-literal).
|
||||||
@ -96,6 +137,36 @@ Result:
|
|||||||
└───┘
|
└───┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## isNotDistinctFrom
|
||||||
|
|
||||||
|
Performs null-safe comparison. Used to compare JOIN keys which contain NULL values in the JOIN ON section.
|
||||||
|
This function will consider two `NULL` values as identical and will return `true`, which is distinct from the usual
|
||||||
|
equals behavior where comparing two `NULL` values would return `NULL`.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
This function is an internal function used by the implementation of JOIN ON. Please do not use it manually in queries.
|
||||||
|
:::
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
isNotDistinctFrom(x, y)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Arguments**
|
||||||
|
|
||||||
|
- `x` — first JOIN key.
|
||||||
|
- `y` — second JOIN key.
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
- `true` when `x` and `y` are both `NULL`.
|
||||||
|
- `false` otherwise.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
For a complete example see: [NULL values in JOIN keys](../../sql-reference/statements/select/join#null-values-in-join-keys).
|
||||||
|
|
||||||
## isZeroOrNull
|
## isZeroOrNull
|
||||||
|
|
||||||
Returns whether the argument is 0 (zero) or [NULL](../../sql-reference/syntax.md#null-literal).
|
Returns whether the argument is 0 (zero) or [NULL](../../sql-reference/syntax.md#null-literal).
|
||||||
|
@ -1058,7 +1058,7 @@ convertCharset(s, from, to)
|
|||||||
|
|
||||||
## base58Encode
|
## base58Encode
|
||||||
|
|
||||||
Encodes a String using [Base58](https://tools.ietf.org/id/draft-msporny-base58-01.html) in the "Bitcoin" alphabet.
|
Encodes a String using [Base58](https://datatracker.ietf.org/doc/html/draft-msporny-base58) in the "Bitcoin" alphabet.
|
||||||
|
|
||||||
**Syntax**
|
**Syntax**
|
||||||
|
|
||||||
@ -1092,7 +1092,7 @@ Result:
|
|||||||
|
|
||||||
## base58Decode
|
## base58Decode
|
||||||
|
|
||||||
Accepts a String and decodes it using [Base58](https://tools.ietf.org/id/draft-msporny-base58-01.html) encoding scheme using "Bitcoin" alphabet.
|
Accepts a String and decodes it using [Base58](https://datatracker.ietf.org/doc/html/draft-msporny-base58) encoding scheme using "Bitcoin" alphabet.
|
||||||
|
|
||||||
**Syntax**
|
**Syntax**
|
||||||
|
|
||||||
|
@ -151,6 +151,14 @@ Result:
|
|||||||
|
|
||||||
Query with `INNER` type of a join and conditions with `OR` and `AND`:
|
Query with `INNER` type of a join and conditions with `OR` and `AND`:
|
||||||
|
|
||||||
|
:::note
|
||||||
|
|
||||||
|
By default, non-equal conditions are supported as long as they use columns from the same table.
|
||||||
|
For example, `t1.a = t2.key AND t1.b > 0 AND t2.b > t2.c`, because `t1.b > 0` uses columns only from `t1` and `t2.b > t2.c` uses columns only from `t2`.
|
||||||
|
However, you can try experimental support for conditions like `t1.a = t2.key AND t1.b > t2.key`, check out section below for more details.
|
||||||
|
|
||||||
|
:::
|
||||||
|
|
||||||
``` sql
|
``` sql
|
||||||
SELECT a, b, val FROM t1 INNER JOIN t2 ON t1.a = t2.key OR t1.b = t2.key AND t2.val > 3;
|
SELECT a, b, val FROM t1 INNER JOIN t2 ON t1.a = t2.key OR t1.b = t2.key AND t2.val > 3;
|
||||||
```
|
```
|
||||||
@ -165,7 +173,7 @@ Result:
|
|||||||
└───┴────┴─────┘
|
└───┴────┴─────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
## [experimental] Join with inequality conditions
|
## [experimental] Join with inequality conditions for columns from different tables
|
||||||
|
|
||||||
:::note
|
:::note
|
||||||
This feature is experimental. To use it, set `allow_experimental_join_condition` to 1 in your configuration files or by using the `SET` command:
|
This feature is experimental. To use it, set `allow_experimental_join_condition` to 1 in your configuration files or by using the `SET` command:
|
||||||
|
@ -87,3 +87,226 @@ LIMIT 10;
|
|||||||
WITH test1 AS (SELECT i + 1, j + 1 FROM test1)
|
WITH test1 AS (SELECT i + 1, j + 1 FROM test1)
|
||||||
SELECT * FROM test1;
|
SELECT * FROM test1;
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Recursive Queries
|
||||||
|
|
||||||
|
The optional RECURSIVE modifier allows for a WITH query to refer to its own output. Example:
|
||||||
|
|
||||||
|
**Example:** Sum integers from 1 through 100
|
||||||
|
|
||||||
|
```sql
|
||||||
|
WITH RECURSIVE test_table AS (
|
||||||
|
SELECT 1 AS number
|
||||||
|
UNION ALL
|
||||||
|
SELECT number + 1 FROM test_table WHERE number < 100
|
||||||
|
)
|
||||||
|
SELECT sum(number) FROM test_table;
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─sum(number)─┐
|
||||||
|
│ 5050 │
|
||||||
|
└─────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
The general form of a recursive `WITH` query is always a non-recursive term, then `UNION ALL`, then a recursive term, where only the recursive term can contain a reference to the query's own output. Recursive CTE query is executed as follows:
|
||||||
|
|
||||||
|
1. Evaluate the non-recursive term. Place result of non-recursive term query in a temporary working table.
|
||||||
|
2. As long as the working table is not empty, repeat these steps:
|
||||||
|
1. Evaluate the recursive term, substituting the current contents of the working table for the recursive self-reference. Place result of recursive term query in a temporary intermediate table.
|
||||||
|
2. Replace the contents of the working table with the contents of the intermediate table, then empty the intermediate table.
|
||||||
|
|
||||||
|
Recursive queries are typically used to work with hierarchical or tree-structured data. For example, we can write a query that performs tree traversal:
|
||||||
|
|
||||||
|
**Example:** Tree traversal
|
||||||
|
|
||||||
|
First let's create tree table:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
DROP TABLE IF EXISTS tree;
|
||||||
|
CREATE TABLE tree
|
||||||
|
(
|
||||||
|
id UInt64,
|
||||||
|
parent_id Nullable(UInt64),
|
||||||
|
data String
|
||||||
|
) ENGINE = MergeTree ORDER BY id;
|
||||||
|
|
||||||
|
INSERT INTO tree VALUES (0, NULL, 'ROOT'), (1, 0, 'Child_1'), (2, 0, 'Child_2'), (3, 1, 'Child_1_1');
|
||||||
|
```
|
||||||
|
|
||||||
|
We can traverse that tree with the following query:
|
||||||
|
|
||||||
|
**Example:** Tree traversal
|
||||||
|
```sql
|
||||||
|
WITH RECURSIVE search_tree AS (
|
||||||
|
SELECT id, parent_id, data
|
||||||
|
FROM tree t
|
||||||
|
WHERE t.id = 0
|
||||||
|
UNION ALL
|
||||||
|
SELECT t.id, t.parent_id, t.data
|
||||||
|
FROM tree t, search_tree st
|
||||||
|
WHERE t.parent_id = st.id
|
||||||
|
)
|
||||||
|
SELECT * FROM search_tree;
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─id─┬─parent_id─┬─data──────┐
|
||||||
|
│ 0 │ ᴺᵁᴸᴸ │ ROOT │
|
||||||
|
│ 1 │ 0 │ Child_1 │
|
||||||
|
│ 2 │ 0 │ Child_2 │
|
||||||
|
│ 3 │ 1 │ Child_1_1 │
|
||||||
|
└────┴───────────┴───────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Search order
|
||||||
|
|
||||||
|
To create a depth-first order, we compute for each result row an array of rows that we have already visited:
|
||||||
|
|
||||||
|
**Example:** Tree traversal depth-first order
|
||||||
|
```sql
|
||||||
|
WITH RECURSIVE search_tree AS (
|
||||||
|
SELECT id, parent_id, data, [t.id] AS path
|
||||||
|
FROM tree t
|
||||||
|
WHERE t.id = 0
|
||||||
|
UNION ALL
|
||||||
|
SELECT t.id, t.parent_id, t.data, arrayConcat(path, [t.id])
|
||||||
|
FROM tree t, search_tree st
|
||||||
|
WHERE t.parent_id = st.id
|
||||||
|
)
|
||||||
|
SELECT * FROM search_tree ORDER BY path;
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─id─┬─parent_id─┬─data──────┬─path────┐
|
||||||
|
│ 0 │ ᴺᵁᴸᴸ │ ROOT │ [0] │
|
||||||
|
│ 1 │ 0 │ Child_1 │ [0,1] │
|
||||||
|
│ 3 │ 1 │ Child_1_1 │ [0,1,3] │
|
||||||
|
│ 2 │ 0 │ Child_2 │ [0,2] │
|
||||||
|
└────┴───────────┴───────────┴─────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
To create a breadth-first order, standard approach is to add column that tracks the depth of the search:
|
||||||
|
|
||||||
|
**Example:** Tree traversal breadth-first order
|
||||||
|
```sql
|
||||||
|
WITH RECURSIVE search_tree AS (
|
||||||
|
SELECT id, parent_id, data, [t.id] AS path, toUInt64(0) AS depth
|
||||||
|
FROM tree t
|
||||||
|
WHERE t.id = 0
|
||||||
|
UNION ALL
|
||||||
|
SELECT t.id, t.parent_id, t.data, arrayConcat(path, [t.id]), depth + 1
|
||||||
|
FROM tree t, search_tree st
|
||||||
|
WHERE t.parent_id = st.id
|
||||||
|
)
|
||||||
|
SELECT * FROM search_tree ORDER BY depth;
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─id─┬─link─┬─data──────┬─path────┬─depth─┐
|
||||||
|
│ 0 │ ᴺᵁᴸᴸ │ ROOT │ [0] │ 0 │
|
||||||
|
│ 1 │ 0 │ Child_1 │ [0,1] │ 1 │
|
||||||
|
│ 2 │ 0 │ Child_2 │ [0,2] │ 1 │
|
||||||
|
│ 3 │ 1 │ Child_1_1 │ [0,1,3] │ 2 │
|
||||||
|
└────┴──────┴───────────┴─────────┴───────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Cycle detection
|
||||||
|
|
||||||
|
First let's create graph table:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
DROP TABLE IF EXISTS graph;
|
||||||
|
CREATE TABLE graph
|
||||||
|
(
|
||||||
|
from UInt64,
|
||||||
|
to UInt64,
|
||||||
|
label String
|
||||||
|
) ENGINE = MergeTree ORDER BY (from, to);
|
||||||
|
|
||||||
|
INSERT INTO graph VALUES (1, 2, '1 -> 2'), (1, 3, '1 -> 3'), (2, 3, '2 -> 3'), (1, 4, '1 -> 4'), (4, 5, '4 -> 5');
|
||||||
|
```
|
||||||
|
|
||||||
|
We can traverse that graph with such query:
|
||||||
|
|
||||||
|
**Example:** Graph traversal without cycle detection
|
||||||
|
```sql
|
||||||
|
WITH RECURSIVE search_graph AS (
|
||||||
|
SELECT from, to, label FROM graph g
|
||||||
|
UNION ALL
|
||||||
|
SELECT g.from, g.to, g.label
|
||||||
|
FROM graph g, search_graph sg
|
||||||
|
WHERE g.from = sg.to
|
||||||
|
)
|
||||||
|
SELECT DISTINCT * FROM search_graph ORDER BY from;
|
||||||
|
```
|
||||||
|
```text
|
||||||
|
┌─from─┬─to─┬─label──┐
|
||||||
|
│ 1 │ 4 │ 1 -> 4 │
|
||||||
|
│ 1 │ 2 │ 1 -> 2 │
|
||||||
|
│ 1 │ 3 │ 1 -> 3 │
|
||||||
|
│ 2 │ 3 │ 2 -> 3 │
|
||||||
|
│ 4 │ 5 │ 4 -> 5 │
|
||||||
|
└──────┴────┴────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
But if we add cycle in that graph, previous query will fail with `Maximum recursive CTE evaluation depth` error:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
INSERT INTO graph VALUES (5, 1, '5 -> 1');
|
||||||
|
|
||||||
|
WITH RECURSIVE search_graph AS (
|
||||||
|
SELECT from, to, label FROM graph g
|
||||||
|
UNION ALL
|
||||||
|
SELECT g.from, g.to, g.label
|
||||||
|
FROM graph g, search_graph sg
|
||||||
|
WHERE g.from = sg.to
|
||||||
|
)
|
||||||
|
SELECT DISTINCT * FROM search_graph ORDER BY from;
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
Code: 306. DB::Exception: Received from localhost:9000. DB::Exception: Maximum recursive CTE evaluation depth (1000) exceeded, during evaluation of search_graph AS (SELECT from, to, label FROM graph AS g UNION ALL SELECT g.from, g.to, g.label FROM graph AS g, search_graph AS sg WHERE g.from = sg.to). Consider raising max_recursive_cte_evaluation_depth setting.: While executing RecursiveCTESource. (TOO_DEEP_RECURSION)
|
||||||
|
```
|
||||||
|
|
||||||
|
The standard method for handling cycles is to compute an array of the already visited nodes:
|
||||||
|
|
||||||
|
**Example:** Graph traversal with cycle detection
|
||||||
|
```sql
|
||||||
|
WITH RECURSIVE search_graph AS (
|
||||||
|
SELECT from, to, label, false AS is_cycle, [tuple(g.from, g.to)] AS path FROM graph g
|
||||||
|
UNION ALL
|
||||||
|
SELECT g.from, g.to, g.label, has(path, tuple(g.from, g.to)), arrayConcat(sg.path, [tuple(g.from, g.to)])
|
||||||
|
FROM graph g, search_graph sg
|
||||||
|
WHERE g.from = sg.to AND NOT is_cycle
|
||||||
|
)
|
||||||
|
SELECT * FROM search_graph WHERE is_cycle ORDER BY from;
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─from─┬─to─┬─label──┬─is_cycle─┬─path──────────────────────┐
|
||||||
|
│ 1 │ 4 │ 1 -> 4 │ true │ [(1,4),(4,5),(5,1),(1,4)] │
|
||||||
|
│ 4 │ 5 │ 4 -> 5 │ true │ [(4,5),(5,1),(1,4),(4,5)] │
|
||||||
|
│ 5 │ 1 │ 5 -> 1 │ true │ [(5,1),(1,4),(4,5),(5,1)] │
|
||||||
|
└──────┴────┴────────┴──────────┴───────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Infinite queries
|
||||||
|
|
||||||
|
It is also possible to use infinite recursive CTE queries if `LIMIT` is used in outer query:
|
||||||
|
|
||||||
|
**Example:** Infinite recursive CTE query
|
||||||
|
```sql
|
||||||
|
WITH RECURSIVE test_table AS (
|
||||||
|
SELECT 1 AS number
|
||||||
|
UNION ALL
|
||||||
|
SELECT number + 1 FROM test_table
|
||||||
|
)
|
||||||
|
SELECT sum(number) FROM (SELECT number FROM test_table LIMIT 100);
|
||||||
|
```
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─sum(number)─┐
|
||||||
|
│ 5050 │
|
||||||
|
└─────────────┘
|
||||||
|
```
|
||||||
|
311
docs/ru/sql-reference/functions/null-functions.md
Normal file
311
docs/ru/sql-reference/functions/null-functions.md
Normal file
@ -0,0 +1,311 @@
|
|||||||
|
---
|
||||||
|
slug: /ru/sql-reference/functions/functions-for-nulls
|
||||||
|
sidebar_position: 63
|
||||||
|
sidebar_label: "Функции для работы с Nullable-аргументами"
|
||||||
|
---
|
||||||
|
|
||||||
|
# Функции для работы с Nullable-аргументами {#funktsii-dlia-raboty-s-nullable-argumentami}
|
||||||
|
|
||||||
|
## isNull {#isnull}
|
||||||
|
|
||||||
|
Проверяет является ли аргумент [NULL](../../sql-reference/syntax.md#null-literal).
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
isNull(x)
|
||||||
|
```
|
||||||
|
|
||||||
|
Синоним: `ISNULL`.
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `x` — значение с не составным типом данных.
|
||||||
|
|
||||||
|
**Возвращаемое значение**
|
||||||
|
|
||||||
|
- `1`, если `x` — `NULL`.
|
||||||
|
- `0`, если `x` — не `NULL`.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
Входная таблица
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─x─┬────y─┐
|
||||||
|
│ 1 │ ᴺᵁᴸᴸ │
|
||||||
|
│ 2 │ 3 │
|
||||||
|
└───┴──────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Запрос
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT x FROM t_null WHERE isNull(y);
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─x─┐
|
||||||
|
│ 1 │
|
||||||
|
└───┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## isNotNull {#isnotnull}
|
||||||
|
|
||||||
|
Проверяет не является ли аргумент [NULL](../../sql-reference/syntax.md#null-literal).
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
isNotNull(x)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `x` — значение с не составным типом данных.
|
||||||
|
|
||||||
|
**Возвращаемое значение**
|
||||||
|
|
||||||
|
- `0`, если `x` — `NULL`.
|
||||||
|
- `1`, если `x` — не `NULL`.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
Входная таблица
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─x─┬────y─┐
|
||||||
|
│ 1 │ ᴺᵁᴸᴸ │
|
||||||
|
│ 2 │ 3 │
|
||||||
|
└───┴──────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Запрос
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT x FROM t_null WHERE isNotNull(y);
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─x─┐
|
||||||
|
│ 2 │
|
||||||
|
└───┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## coalesce {#coalesce}
|
||||||
|
|
||||||
|
Последовательно слева-направо проверяет являются ли переданные аргументы `NULL` и возвращает первый не `NULL`.
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
coalesce(x,...)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- Произвольное количество параметров не составного типа. Все параметры должны быть совместимы по типу данных.
|
||||||
|
|
||||||
|
**Возвращаемые значения**
|
||||||
|
|
||||||
|
- Первый не `NULL` аргумент.
|
||||||
|
- `NULL`, если все аргументы — `NULL`.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
Рассмотрим адресную книгу, в которой может быть указано несколько способов связи с клиентом.
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─name─────┬─mail─┬─phone─────┬──icq─┐
|
||||||
|
│ client 1 │ ᴺᵁᴸᴸ │ 123-45-67 │ 123 │
|
||||||
|
│ client 2 │ ᴺᵁᴸᴸ │ ᴺᵁᴸᴸ │ ᴺᵁᴸᴸ │
|
||||||
|
└──────────┴──────┴───────────┴──────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Поля `mail` и `phone` имеют тип String, а поле `icq` — `UInt32`, его необходимо будет преобразовать в `String`.
|
||||||
|
|
||||||
|
Получим из адресной книги первый доступный способ связаться с клиентом:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT coalesce(mail, phone, CAST(icq,'Nullable(String)')) FROM aBook;
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─name─────┬─coalesce(mail, phone, CAST(icq, 'Nullable(String)'))─┐
|
||||||
|
│ client 1 │ 123-45-67 │
|
||||||
|
│ client 2 │ ᴺᵁᴸᴸ │
|
||||||
|
└──────────┴──────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## ifNull {#ifnull}
|
||||||
|
|
||||||
|
Возвращает альтернативное значение, если основной аргумент — `NULL`.
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
ifNull(x,alt)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `x` — значение для проверки на `NULL`,
|
||||||
|
- `alt` — значение, которое функция вернёт, если `x` — `NULL`.
|
||||||
|
|
||||||
|
**Возвращаемые значения**
|
||||||
|
|
||||||
|
- Значение `x`, если `x` — не `NULL`.
|
||||||
|
- Значение `alt`, если `x` — `NULL`.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT ifNull('a', 'b');
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─ifNull('a', 'b')─┐
|
||||||
|
│ a │
|
||||||
|
└──────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT ifNull(NULL, 'b');
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─ifNull(NULL, 'b')─┐
|
||||||
|
│ b │
|
||||||
|
└───────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## nullIf {#nullif}
|
||||||
|
|
||||||
|
Возвращает `NULL`, если аргументы равны.
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
nullIf(x, y)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
`x`, `y` — значения для сравнивания. Они должны быть совместимых типов, иначе ClickHouse сгенерирует исключение.
|
||||||
|
|
||||||
|
**Возвращаемые значения**
|
||||||
|
|
||||||
|
- `NULL`, если аргументы равны.
|
||||||
|
- Значение `x`, если аргументы не равны.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT nullIf(1, 1);
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─nullIf(1, 1)─┐
|
||||||
|
│ ᴺᵁᴸᴸ │
|
||||||
|
└──────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT nullIf(1, 2);
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─nullIf(1, 2)─┐
|
||||||
|
│ 1 │
|
||||||
|
└──────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## assumeNotNull {#assumenotnull}
|
||||||
|
|
||||||
|
Приводит значение типа [Nullable](../../sql-reference/functions/functions-for-nulls.md) к не `Nullable`, если значение не `NULL`.
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
assumeNotNull(x)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `x` — исходное значение.
|
||||||
|
|
||||||
|
**Возвращаемые значения**
|
||||||
|
|
||||||
|
- Исходное значение с не `Nullable` типом, если оно — не `NULL`.
|
||||||
|
- Неспецифицированный результат, зависящий от реализации, если исходное значение — `NULL`.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
Рассмотрим таблицу `t_null`.
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SHOW CREATE TABLE t_null;
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─statement─────────────────────────────────────────────────────────────────┐
|
||||||
|
│ CREATE TABLE default.t_null ( x Int8, y Nullable(Int8)) ENGINE = TinyLog │
|
||||||
|
└───────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─x─┬────y─┐
|
||||||
|
│ 1 │ ᴺᵁᴸᴸ │
|
||||||
|
│ 2 │ 3 │
|
||||||
|
└───┴──────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Применим функцию `assumeNotNull` к столбцу `y`.
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT assumeNotNull(y) FROM t_null;
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─assumeNotNull(y)─┐
|
||||||
|
│ 0 │
|
||||||
|
│ 3 │
|
||||||
|
└──────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT toTypeName(assumeNotNull(y)) FROM t_null;
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─toTypeName(assumeNotNull(y))─┐
|
||||||
|
│ Int8 │
|
||||||
|
│ Int8 │
|
||||||
|
└──────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## toNullable {#tonullable}
|
||||||
|
|
||||||
|
Преобразует тип аргумента к `Nullable`.
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
toNullable(x)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `x` — значение произвольного не составного типа.
|
||||||
|
|
||||||
|
**Возвращаемое значение**
|
||||||
|
|
||||||
|
- Входное значение с типом `Nullable`.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT toTypeName(10);
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─toTypeName(10)─┐
|
||||||
|
│ UInt8 │
|
||||||
|
└────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT toTypeName(toNullable(10));
|
||||||
|
```
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─toTypeName(toNullable(10))─┐
|
||||||
|
│ Nullable(UInt8) │
|
||||||
|
└────────────────────────────┘
|
||||||
|
```
|
@ -493,7 +493,7 @@ SELECT concat(key1, key2), sum(value) FROM key_val GROUP BY (key1, key2);
|
|||||||
|
|
||||||
## base58Encode(plaintext), base58Decode(encoded_text) {#base58}
|
## base58Encode(plaintext), base58Decode(encoded_text) {#base58}
|
||||||
|
|
||||||
Принимает на вход строку или колонку строк и кодирует/раскодирует их с помощью схемы кодирования [Base58](https://tools.ietf.org/id/draft-msporny-base58-01.html) с использованием стандартного алфавита Bitcoin.
|
Принимает на вход строку или колонку строк и кодирует/раскодирует их с помощью схемы кодирования [Base58](https://datatracker.ietf.org/doc/html/draft-msporny-base58) с использованием стандартного алфавита Bitcoin.
|
||||||
|
|
||||||
**Синтаксис**
|
**Синтаксис**
|
||||||
|
|
||||||
|
254
docs/zh/sql-reference/functions/null-functions.md
Normal file
254
docs/zh/sql-reference/functions/null-functions.md
Normal file
@ -0,0 +1,254 @@
|
|||||||
|
---
|
||||||
|
slug: /zh/sql-reference/functions/functions-for-nulls
|
||||||
|
---
|
||||||
|
# Nullable处理函数 {#nullablechu-li-han-shu}
|
||||||
|
|
||||||
|
## isNull {#isnull}
|
||||||
|
|
||||||
|
检查参数是否为[NULL](../../sql-reference/syntax.md#null-literal)。
|
||||||
|
|
||||||
|
isNull(x)
|
||||||
|
|
||||||
|
**参数**
|
||||||
|
|
||||||
|
- `x` — 一个非复合数据类型的值。
|
||||||
|
|
||||||
|
**返回值**
|
||||||
|
|
||||||
|
- `1` 如果`x`为`NULL`。
|
||||||
|
- `0` 如果`x`不为`NULL`。
|
||||||
|
|
||||||
|
**示例**
|
||||||
|
|
||||||
|
存在以下内容的表
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─x─┬────y─┐
|
||||||
|
│ 1 │ ᴺᵁᴸᴸ │
|
||||||
|
│ 2 │ 3 │
|
||||||
|
└───┴──────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
对其进行查询
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT x FROM t_null WHERE isNull(y)
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─x─┐
|
||||||
|
│ 1 │
|
||||||
|
└───┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## isNotNull {#isnotnull}
|
||||||
|
|
||||||
|
检查参数是否不为 [NULL](../../sql-reference/syntax.md#null-literal).
|
||||||
|
|
||||||
|
isNotNull(x)
|
||||||
|
|
||||||
|
**参数:**
|
||||||
|
|
||||||
|
- `x` — 一个非复合数据类型的值。
|
||||||
|
|
||||||
|
**返回值**
|
||||||
|
|
||||||
|
- `0` 如果`x`为`NULL`。
|
||||||
|
- `1` 如果`x`不为`NULL`。
|
||||||
|
|
||||||
|
**示例**
|
||||||
|
|
||||||
|
存在以下内容的表
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─x─┬────y─┐
|
||||||
|
│ 1 │ ᴺᵁᴸᴸ │
|
||||||
|
│ 2 │ 3 │
|
||||||
|
└───┴──────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
对其进行查询
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT x FROM t_null WHERE isNotNull(y)
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─x─┐
|
||||||
|
│ 2 │
|
||||||
|
└───┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## 合并 {#coalesce}
|
||||||
|
|
||||||
|
检查从左到右是否传递了«NULL»参数并返回第一个非`'NULL`参数。
|
||||||
|
|
||||||
|
coalesce(x,...)
|
||||||
|
|
||||||
|
**参数:**
|
||||||
|
|
||||||
|
- 任何数量的非复合类型的参数。所有参数必须与数据类型兼容。
|
||||||
|
|
||||||
|
**返回值**
|
||||||
|
|
||||||
|
- 第一个非’NULL\`参数。
|
||||||
|
- `NULL`,如果所有参数都是’NULL\`。
|
||||||
|
|
||||||
|
**示例**
|
||||||
|
|
||||||
|
考虑可以指定多种联系客户的方式的联系人列表。
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─name─────┬─mail─┬─phone─────┬──icq─┐
|
||||||
|
│ client 1 │ ᴺᵁᴸᴸ │ 123-45-67 │ 123 │
|
||||||
|
│ client 2 │ ᴺᵁᴸᴸ │ ᴺᵁᴸᴸ │ ᴺᵁᴸᴸ │
|
||||||
|
└──────────┴──────┴───────────┴──────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
`mail`和`phone`字段是String类型,但`icq`字段是`UInt32`,所以它需要转换为`String`。
|
||||||
|
|
||||||
|
从联系人列表中获取客户的第一个可用联系方式:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SELECT coalesce(mail, phone, CAST(icq,'Nullable(String)')) FROM aBook
|
||||||
|
```
|
||||||
|
|
||||||
|
```response
|
||||||
|
┌─name─────┬─coalesce(mail, phone, CAST(icq, 'Nullable(String)'))─┐
|
||||||
|
│ client 1 │ 123-45-67 │
|
||||||
|
│ client 2 │ ᴺᵁᴸᴸ │
|
||||||
|
└──────────┴──────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## ifNull {#ifnull}
|
||||||
|
|
||||||
|
如果第一个参数为«NULL»,则返回第二个参数的值。
|
||||||
|
|
||||||
|
ifNull(x,alt)
|
||||||
|
|
||||||
|
**参数:**
|
||||||
|
|
||||||
|
- `x` — 要检查«NULL»的值。
|
||||||
|
- `alt` — 如果`x`为’NULL\`,函数返回的值。
|
||||||
|
|
||||||
|
**返回值**
|
||||||
|
|
||||||
|
- 价值 `x`,如果 `x` 不是 `NULL`.
|
||||||
|
- 价值 `alt`,如果 `x` 是 `NULL`.
|
||||||
|
|
||||||
|
**示例**
|
||||||
|
|
||||||
|
SELECT ifNull('a', 'b')
|
||||||
|
|
||||||
|
┌─ifNull('a', 'b')─┐
|
||||||
|
│ a │
|
||||||
|
└──────────────────┘
|
||||||
|
|
||||||
|
SELECT ifNull(NULL, 'b')
|
||||||
|
|
||||||
|
┌─ifNull(NULL, 'b')─┐
|
||||||
|
│ b │
|
||||||
|
└───────────────────┘
|
||||||
|
|
||||||
|
## nullIf {#nullif}
|
||||||
|
|
||||||
|
如果参数相等,则返回`NULL`。
|
||||||
|
|
||||||
|
nullIf(x, y)
|
||||||
|
|
||||||
|
**参数:**
|
||||||
|
|
||||||
|
`x`, `y` — 用于比较的值。 它们必须是类型兼容的,否则将抛出异常。
|
||||||
|
|
||||||
|
**返回值**
|
||||||
|
|
||||||
|
- 如果参数相等,则为`NULL`。
|
||||||
|
- 如果参数不相等,则为`x`值。
|
||||||
|
|
||||||
|
**示例**
|
||||||
|
|
||||||
|
SELECT nullIf(1, 1)
|
||||||
|
|
||||||
|
┌─nullIf(1, 1)─┐
|
||||||
|
│ ᴺᵁᴸᴸ │
|
||||||
|
└──────────────┘
|
||||||
|
|
||||||
|
SELECT nullIf(1, 2)
|
||||||
|
|
||||||
|
┌─nullIf(1, 2)─┐
|
||||||
|
│ 1 │
|
||||||
|
└──────────────┘
|
||||||
|
|
||||||
|
## assumeNotNull {#assumenotnull}
|
||||||
|
|
||||||
|
将[可为空](../../sql-reference/functions/functions-for-nulls.md)类型的值转换为非`Nullable`类型的值。
|
||||||
|
|
||||||
|
assumeNotNull(x)
|
||||||
|
|
||||||
|
**参数:**
|
||||||
|
|
||||||
|
- `x` — 原始值。
|
||||||
|
|
||||||
|
**返回值**
|
||||||
|
|
||||||
|
- 如果`x`不为`NULL`,返回非`Nullable`类型的原始值。
|
||||||
|
- 如果`x`为`NULL`,则返回任意值。
|
||||||
|
|
||||||
|
**示例**
|
||||||
|
|
||||||
|
存在如下`t_null`表。
|
||||||
|
|
||||||
|
SHOW CREATE TABLE t_null
|
||||||
|
|
||||||
|
┌─statement─────────────────────────────────────────────────────────────────┐
|
||||||
|
│ CREATE TABLE default.t_null ( x Int8, y Nullable(Int8)) ENGINE = TinyLog │
|
||||||
|
└───────────────────────────────────────────────────────────────────────────┘
|
||||||
|
|
||||||
|
┌─x─┬────y─┐
|
||||||
|
│ 1 │ ᴺᵁᴸᴸ │
|
||||||
|
│ 2 │ 3 │
|
||||||
|
└───┴──────┘
|
||||||
|
|
||||||
|
将列`y`作为`assumeNotNull`函数的参数。
|
||||||
|
|
||||||
|
SELECT assumeNotNull(y) FROM t_null
|
||||||
|
|
||||||
|
┌─assumeNotNull(y)─┐
|
||||||
|
│ 0 │
|
||||||
|
│ 3 │
|
||||||
|
└──────────────────┘
|
||||||
|
|
||||||
|
SELECT toTypeName(assumeNotNull(y)) FROM t_null
|
||||||
|
|
||||||
|
┌─toTypeName(assumeNotNull(y))─┐
|
||||||
|
│ Int8 │
|
||||||
|
│ Int8 │
|
||||||
|
└──────────────────────────────┘
|
||||||
|
|
||||||
|
## 可调整 {#tonullable}
|
||||||
|
|
||||||
|
将参数的类型转换为`Nullable`。
|
||||||
|
|
||||||
|
toNullable(x)
|
||||||
|
|
||||||
|
**参数:**
|
||||||
|
|
||||||
|
- `x` — 任何非复合类型的值。
|
||||||
|
|
||||||
|
**返回值**
|
||||||
|
|
||||||
|
- 输入的值,但其类型为`Nullable`。
|
||||||
|
|
||||||
|
**示例**
|
||||||
|
|
||||||
|
SELECT toTypeName(10)
|
||||||
|
|
||||||
|
┌─toTypeName(10)─┐
|
||||||
|
│ UInt8 │
|
||||||
|
└────────────────┘
|
||||||
|
|
||||||
|
SELECT toTypeName(toNullable(10))
|
||||||
|
|
||||||
|
┌─toTypeName(toNullable(10))─┐
|
||||||
|
│ Nullable(UInt8) │
|
||||||
|
└────────────────────────────┘
|
@ -233,7 +233,7 @@ struct Commit
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
enum class FileChangeType
|
enum class FileChangeType : uint8_t
|
||||||
{
|
{
|
||||||
Add,
|
Add,
|
||||||
Delete,
|
Delete,
|
||||||
@ -291,7 +291,7 @@ struct FileChange
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
enum class LineType
|
enum class LineType : uint8_t
|
||||||
{
|
{
|
||||||
Empty,
|
Empty,
|
||||||
Comment,
|
Comment,
|
||||||
|
@ -323,7 +323,7 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
|
|||||||
{
|
{
|
||||||
fmt::print("Symlink {} already exists but it points to {}. Will replace the old symlink to {}.\n",
|
fmt::print("Symlink {} already exists but it points to {}. Will replace the old symlink to {}.\n",
|
||||||
main_bin_path.string(), points_to.string(), binary_self_canonical_path.string());
|
main_bin_path.string(), points_to.string(), binary_self_canonical_path.string());
|
||||||
fs::remove(main_bin_path);
|
(void)fs::remove(main_bin_path);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -489,7 +489,7 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
|
|||||||
{
|
{
|
||||||
fmt::print("Symlink {} already exists but it points to {}. Will replace the old symlink to {}.\n",
|
fmt::print("Symlink {} already exists but it points to {}. Will replace the old symlink to {}.\n",
|
||||||
symlink_path.string(), points_to.string(), main_bin_path.string());
|
symlink_path.string(), points_to.string(), main_bin_path.string());
|
||||||
fs::remove(symlink_path);
|
(void)fs::remove(symlink_path);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1006,7 +1006,7 @@ namespace
|
|||||||
else
|
else
|
||||||
{
|
{
|
||||||
fmt::print("{} file exists but damaged, ignoring.\n", pid_file.string());
|
fmt::print("{} file exists but damaged, ignoring.\n", pid_file.string());
|
||||||
fs::remove(pid_file);
|
(void)fs::remove(pid_file);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -1014,7 +1014,7 @@ namespace
|
|||||||
/// Create a directory for pid file.
|
/// Create a directory for pid file.
|
||||||
/// It's created by "install" but we also support cases when ClickHouse is already installed different way.
|
/// It's created by "install" but we also support cases when ClickHouse is already installed different way.
|
||||||
fs::path pid_path = pid_file;
|
fs::path pid_path = pid_file;
|
||||||
pid_path.remove_filename();
|
pid_path = pid_path.remove_filename();
|
||||||
fs::create_directories(pid_path);
|
fs::create_directories(pid_path);
|
||||||
/// All users are allowed to read pid file (for clickhouse status command).
|
/// All users are allowed to read pid file (for clickhouse status command).
|
||||||
fs::permissions(pid_path, fs::perms::owner_all | fs::perms::group_read | fs::perms::others_read, fs::perm_options::replace);
|
fs::permissions(pid_path, fs::perms::owner_all | fs::perms::group_read | fs::perms::others_read, fs::perm_options::replace);
|
||||||
@ -1098,7 +1098,7 @@ namespace
|
|||||||
else
|
else
|
||||||
{
|
{
|
||||||
fmt::print("{} file exists but damaged, ignoring.\n", pid_file.string());
|
fmt::print("{} file exists but damaged, ignoring.\n", pid_file.string());
|
||||||
fs::remove(pid_file);
|
(void)fs::remove(pid_file);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
catch (const Exception & e)
|
catch (const Exception & e)
|
||||||
|
@ -86,7 +86,10 @@ std::vector<String> KeeperClient::getCompletions(const String & prefix) const
|
|||||||
void KeeperClient::askConfirmation(const String & prompt, std::function<void()> && callback)
|
void KeeperClient::askConfirmation(const String & prompt, std::function<void()> && callback)
|
||||||
{
|
{
|
||||||
if (!ask_confirmation)
|
if (!ask_confirmation)
|
||||||
return callback();
|
{
|
||||||
|
callback();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
std::cout << prompt << " Continue?\n";
|
std::cout << prompt << " Continue?\n";
|
||||||
waiting_confirmation = true;
|
waiting_confirmation = true;
|
||||||
|
@ -284,7 +284,6 @@ void ExternalDictionaryLibraryBridgeRequestHandler::handleRequest(HTTPServerRequ
|
|||||||
else if (method == "extDict_loadIds")
|
else if (method == "extDict_loadIds")
|
||||||
{
|
{
|
||||||
LOG_DEBUG(log, "Getting diciontary ids for dictionary with id: {}", dictionary_id);
|
LOG_DEBUG(log, "Getting diciontary ids for dictionary with id: {}", dictionary_id);
|
||||||
String ids_string;
|
|
||||||
std::vector<uint64_t> ids = parseIdsFromBinary(request.getStream());
|
std::vector<uint64_t> ids = parseIdsFromBinary(request.getStream());
|
||||||
|
|
||||||
auto library_handler = ExternalDictionaryLibraryHandlerFactory::instance().get(dictionary_id);
|
auto library_handler = ExternalDictionaryLibraryHandlerFactory::instance().get(dictionary_id);
|
||||||
|
@ -14,7 +14,7 @@ namespace ErrorCodes
|
|||||||
|
|
||||||
SharedLibrary::SharedLibrary(std::string_view path, int flags)
|
SharedLibrary::SharedLibrary(std::string_view path, int flags)
|
||||||
{
|
{
|
||||||
handle = dlopen(path.data(), flags);
|
handle = dlopen(path.data(), flags); // NOLINT
|
||||||
if (!handle)
|
if (!handle)
|
||||||
throw Exception(ErrorCodes::CANNOT_DLOPEN, "Cannot dlopen: ({})", dlerror()); // NOLINT(concurrency-mt-unsafe) // MT-Safe on Linux, see man dlerror
|
throw Exception(ErrorCodes::CANNOT_DLOPEN, "Cannot dlopen: ({})", dlerror()); // NOLINT(concurrency-mt-unsafe) // MT-Safe on Linux, see man dlerror
|
||||||
|
|
||||||
@ -34,7 +34,7 @@ void * SharedLibrary::getImpl(std::string_view name, bool no_throw)
|
|||||||
{
|
{
|
||||||
dlerror(); // NOLINT(concurrency-mt-unsafe) // MT-Safe on Linux, see man dlerror
|
dlerror(); // NOLINT(concurrency-mt-unsafe) // MT-Safe on Linux, see man dlerror
|
||||||
|
|
||||||
auto * res = dlsym(handle, name.data());
|
auto * res = dlsym(handle, name.data()); // NOLINT
|
||||||
|
|
||||||
if (char * error = dlerror()) // NOLINT(concurrency-mt-unsafe) // MT-Safe on Linux, see man dlerror
|
if (char * error = dlerror()) // NOLINT(concurrency-mt-unsafe) // MT-Safe on Linux, see man dlerror
|
||||||
{
|
{
|
||||||
|
@ -119,7 +119,7 @@ std::pair<std::string_view, std::string_view> clickhouse_short_names[] =
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
enum class InstructionFail
|
enum class InstructionFail : uint8_t
|
||||||
{
|
{
|
||||||
NONE = 0,
|
NONE = 0,
|
||||||
SSE3 = 1,
|
SSE3 = 1,
|
||||||
|
@ -674,8 +674,7 @@ private:
|
|||||||
|
|
||||||
if (pos + length > end)
|
if (pos + length > end)
|
||||||
length = end - pos;
|
length = end - pos;
|
||||||
if (length > sizeof(CodePoint))
|
length = std::min(length, sizeof(CodePoint));
|
||||||
length = sizeof(CodePoint);
|
|
||||||
|
|
||||||
CodePoint res = 0;
|
CodePoint res = 0;
|
||||||
memcpy(&res, pos, length);
|
memcpy(&res, pos, length);
|
||||||
@ -883,9 +882,7 @@ public:
|
|||||||
throw Exception(ErrorCodes::LOGICAL_ERROR, "Logical error in markov model");
|
throw Exception(ErrorCodes::LOGICAL_ERROR, "Logical error in markov model");
|
||||||
|
|
||||||
size_t offset_from_begin_of_string = pos - data;
|
size_t offset_from_begin_of_string = pos - data;
|
||||||
size_t determinator_sliding_window_size = params.determinator_sliding_window_size;
|
size_t determinator_sliding_window_size = std::min(params.determinator_sliding_window_size, determinator_size);
|
||||||
if (determinator_sliding_window_size > determinator_size)
|
|
||||||
determinator_sliding_window_size = determinator_size;
|
|
||||||
|
|
||||||
size_t determinator_sliding_window_overflow = offset_from_begin_of_string + determinator_sliding_window_size > determinator_size
|
size_t determinator_sliding_window_overflow = offset_from_begin_of_string + determinator_sliding_window_size > determinator_size
|
||||||
? offset_from_begin_of_string + determinator_sliding_window_size - determinator_size : 0;
|
? offset_from_begin_of_string + determinator_sliding_window_size - determinator_size : 0;
|
||||||
|
@ -119,8 +119,7 @@ void ODBCSource::insertValue(
|
|||||||
time_t time = 0;
|
time_t time = 0;
|
||||||
const DataTypeDateTime & datetime_type = assert_cast<const DataTypeDateTime &>(*data_type);
|
const DataTypeDateTime & datetime_type = assert_cast<const DataTypeDateTime &>(*data_type);
|
||||||
readDateTimeText(time, in, datetime_type.getTimeZone());
|
readDateTimeText(time, in, datetime_type.getTimeZone());
|
||||||
if (time < 0)
|
time = std::max<time_t>(time, 0);
|
||||||
time = 0;
|
|
||||||
column.insert(static_cast<UInt32>(time));
|
column.insert(static_cast<UInt32>(time));
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -37,7 +37,7 @@ std::string getIdentifierQuote(nanodbc::ConnectionHolderPtr connection_holder)
|
|||||||
IdentifierQuotingStyle getQuotingStyle(nanodbc::ConnectionHolderPtr connection)
|
IdentifierQuotingStyle getQuotingStyle(nanodbc::ConnectionHolderPtr connection)
|
||||||
{
|
{
|
||||||
auto identifier_quote = getIdentifierQuote(connection);
|
auto identifier_quote = getIdentifierQuote(connection);
|
||||||
if (identifier_quote.length() == 0)
|
if (identifier_quote.empty())
|
||||||
return IdentifierQuotingStyle::None;
|
return IdentifierQuotingStyle::None;
|
||||||
else if (identifier_quote[0] == '`')
|
else if (identifier_quote[0] == '`')
|
||||||
return IdentifierQuotingStyle::Backticks;
|
return IdentifierQuotingStyle::Backticks;
|
||||||
|
@ -538,9 +538,57 @@ let params = default_params;
|
|||||||
|
|
||||||
/// Palette generation for charts
|
/// Palette generation for charts
|
||||||
function generatePalette(numColors) {
|
function generatePalette(numColors) {
|
||||||
|
// oklch() does not work in firefox<=125 inside <canvas> element so we convert it back to rgb for now.
|
||||||
|
// Based on https://github.com/color-js/color.js/blob/main/src/spaces/oklch.js
|
||||||
|
const multiplyMatrices = (A, B) => {
|
||||||
|
return [
|
||||||
|
A[0]*B[0] + A[1]*B[1] + A[2]*B[2],
|
||||||
|
A[3]*B[0] + A[4]*B[1] + A[5]*B[2],
|
||||||
|
A[6]*B[0] + A[7]*B[1] + A[8]*B[2]
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
const oklch2oklab = ([l, c, h]) => [
|
||||||
|
l,
|
||||||
|
isNaN(h) ? 0 : c * Math.cos(h * Math.PI / 180),
|
||||||
|
isNaN(h) ? 0 : c * Math.sin(h * Math.PI / 180)
|
||||||
|
]
|
||||||
|
|
||||||
|
const srgbLinear2rgb = rgb => rgb.map(c =>
|
||||||
|
Math.abs(c) > 0.0031308 ?
|
||||||
|
(c < 0 ? -1 : 1) * (1.055 * (Math.abs(c) ** (1 / 2.4)) - 0.055) :
|
||||||
|
12.92 * c
|
||||||
|
)
|
||||||
|
|
||||||
|
const oklab2xyz = lab => {
|
||||||
|
const LMSg = multiplyMatrices([
|
||||||
|
1, 0.3963377773761749, 0.2158037573099136,
|
||||||
|
1, -0.1055613458156586, -0.0638541728258133,
|
||||||
|
1, -0.0894841775298119, -1.2914855480194092,
|
||||||
|
], lab)
|
||||||
|
const LMS = LMSg.map(val => val ** 3)
|
||||||
|
return multiplyMatrices([
|
||||||
|
1.2268798758459243, -0.5578149944602171, 0.2813910456659647,
|
||||||
|
-0.0405757452148008, 1.1122868032803170, -0.0717110580655164,
|
||||||
|
-0.0763729366746601, -0.4214933324022432, 1.5869240198367816
|
||||||
|
], LMS)
|
||||||
|
}
|
||||||
|
|
||||||
|
const xyz2rgbLinear = xyz => {
|
||||||
|
return multiplyMatrices([
|
||||||
|
3.2409699419045226, -1.537383177570094, -0.4986107602930034,
|
||||||
|
-0.9692436362808796, 1.8759675015077202, 0.04155505740717559,
|
||||||
|
0.05563007969699366, -0.20397695888897652, 1.0569715142428786
|
||||||
|
], xyz)
|
||||||
|
}
|
||||||
|
|
||||||
|
const oklch2rgb = lch => srgbLinear2rgb(xyz2rgbLinear(oklab2xyz(oklch2oklab(lch))))
|
||||||
|
|
||||||
palette = [];
|
palette = [];
|
||||||
for (let i = 0; i < numColors; i++) {
|
for (let i = 0; i < numColors; i++) {
|
||||||
palette.push(`oklch(${theme != 'dark' ? 0.75 : 0.5}, 0.15, ${360 * i / numColors})`);
|
//palette.push(`oklch(${theme != 'dark' ? 0.75 : 0.5}, 0.15, ${360 * i / numColors})`);
|
||||||
|
let rgb = oklch2rgb([theme != 'dark' ? 0.75 : 0.5, 0.15, 360 * i / numColors]);
|
||||||
|
palette.push(`rgb(${rgb[0] * 255}, ${rgb[1] * 255}, ${rgb[2] * 255})`);
|
||||||
}
|
}
|
||||||
return palette;
|
return palette;
|
||||||
}
|
}
|
||||||
|
@ -111,13 +111,11 @@ void processTableFiles(const fs::path & data_path, fs::path dst_path, bool test_
|
|||||||
std::shared_ptr<WriteBuffer> directory_meta;
|
std::shared_ptr<WriteBuffer> directory_meta;
|
||||||
if (test_mode)
|
if (test_mode)
|
||||||
{
|
{
|
||||||
auto files_root = dst_path / prefix;
|
|
||||||
directory_meta = std::make_shared<WriteBufferFromHTTP>(HTTPConnectionGroupType::HTTP, Poco::URI(dst_path / directory_prefix / ".index"), Poco::Net::HTTPRequest::HTTP_PUT);
|
directory_meta = std::make_shared<WriteBufferFromHTTP>(HTTPConnectionGroupType::HTTP, Poco::URI(dst_path / directory_prefix / ".index"), Poco::Net::HTTPRequest::HTTP_PUT);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
dst_path = fs::canonical(dst_path);
|
dst_path = fs::canonical(dst_path);
|
||||||
auto files_root = dst_path / prefix;
|
|
||||||
fs::create_directories(dst_path / directory_prefix);
|
fs::create_directories(dst_path / directory_prefix);
|
||||||
directory_meta = std::make_shared<WriteBufferFromFile>(dst_path / directory_prefix / ".index");
|
directory_meta = std::make_shared<WriteBufferFromFile>(dst_path / directory_prefix / ".index");
|
||||||
}
|
}
|
||||||
|
@ -93,8 +93,6 @@ namespace
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t id_endpos = line.find('\t');
|
|
||||||
String id_as_string = line.substr(0, id_endpos);
|
|
||||||
UUID id = parse<UUID>(line);
|
UUID id = parse<UUID>(line);
|
||||||
line.clear();
|
line.clear();
|
||||||
|
|
||||||
|
@ -8,7 +8,7 @@
|
|||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
class AccessControl;
|
class AccessControl;
|
||||||
enum class AccessEntityType;
|
enum class AccessEntityType : uint8_t;
|
||||||
struct IAccessEntity;
|
struct IAccessEntity;
|
||||||
using AccessEntityPtr = std::shared_ptr<const IAccessEntity>;
|
using AccessEntityPtr = std::shared_ptr<const IAccessEntity>;
|
||||||
class AccessRightsElements;
|
class AccessRightsElements;
|
||||||
|
@ -233,7 +233,7 @@ namespace
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Levels:
|
* Levels:
|
||||||
* 1. GLOBAL
|
* 1. GLOBAL
|
||||||
* 2. DATABASE_LEVEL 2. GLOBAL_WITH_PARAMETER (parameter example: named collection)
|
* 2. DATABASE_LEVEL 2. GLOBAL_WITH_PARAMETER (parameter example: named collection)
|
||||||
* 3. TABLE_LEVEL
|
* 3. TABLE_LEVEL
|
||||||
* 4. COLUMN_LEVEL
|
* 4. COLUMN_LEVEL
|
||||||
@ -241,11 +241,12 @@ namespace
|
|||||||
|
|
||||||
enum Level
|
enum Level
|
||||||
{
|
{
|
||||||
GLOBAL_LEVEL,
|
GLOBAL_LEVEL = 0,
|
||||||
DATABASE_LEVEL,
|
DATABASE_LEVEL = 1,
|
||||||
GLOBAL_WITH_PARAMETER = DATABASE_LEVEL,
|
GLOBAL_WITH_PARAMETER = DATABASE_LEVEL,
|
||||||
TABLE_LEVEL,
|
TABLE_LEVEL = 2,
|
||||||
COLUMN_LEVEL,
|
COLUMN_LEVEL = 3,
|
||||||
|
MAX = COLUMN_LEVEL,
|
||||||
};
|
};
|
||||||
|
|
||||||
AccessFlags getAllGrantableFlags(Level level)
|
AccessFlags getAllGrantableFlags(Level level)
|
||||||
@ -520,7 +521,7 @@ public:
|
|||||||
|
|
||||||
private:
|
private:
|
||||||
AccessFlags getAllGrantableFlags() const { return ::DB::getAllGrantableFlags(level); }
|
AccessFlags getAllGrantableFlags() const { return ::DB::getAllGrantableFlags(level); }
|
||||||
AccessFlags getChildAllGrantableFlags() const { return ::DB::getAllGrantableFlags(static_cast<Level>(level + 1)); }
|
AccessFlags getChildAllGrantableFlags() const { return ::DB::getAllGrantableFlags(static_cast<Level>(level == Level::MAX ? level : (level + 1))); }
|
||||||
|
|
||||||
Node * tryGetChild(std::string_view name) const
|
Node * tryGetChild(std::string_view name) const
|
||||||
{
|
{
|
||||||
|
@ -118,13 +118,16 @@ void AuthenticationData::setPassword(const String & password_)
|
|||||||
switch (type)
|
switch (type)
|
||||||
{
|
{
|
||||||
case AuthenticationType::PLAINTEXT_PASSWORD:
|
case AuthenticationType::PLAINTEXT_PASSWORD:
|
||||||
return setPasswordHashBinary(Util::stringToDigest(password_));
|
setPasswordHashBinary(Util::stringToDigest(password_));
|
||||||
|
return;
|
||||||
|
|
||||||
case AuthenticationType::SHA256_PASSWORD:
|
case AuthenticationType::SHA256_PASSWORD:
|
||||||
return setPasswordHashBinary(Util::encodeSHA256(password_));
|
setPasswordHashBinary(Util::encodeSHA256(password_));
|
||||||
|
return;
|
||||||
|
|
||||||
case AuthenticationType::DOUBLE_SHA1_PASSWORD:
|
case AuthenticationType::DOUBLE_SHA1_PASSWORD:
|
||||||
return setPasswordHashBinary(Util::encodeDoubleSHA1(password_));
|
setPasswordHashBinary(Util::encodeDoubleSHA1(password_));
|
||||||
|
return;
|
||||||
|
|
||||||
case AuthenticationType::BCRYPT_PASSWORD:
|
case AuthenticationType::BCRYPT_PASSWORD:
|
||||||
case AuthenticationType::NO_PASSWORD:
|
case AuthenticationType::NO_PASSWORD:
|
||||||
@ -146,7 +149,7 @@ void AuthenticationData::setPasswordBcrypt(const String & password_, int workfac
|
|||||||
if (type != AuthenticationType::BCRYPT_PASSWORD)
|
if (type != AuthenticationType::BCRYPT_PASSWORD)
|
||||||
throw Exception(ErrorCodes::LOGICAL_ERROR, "Cannot specify bcrypt password for authentication type {}", toString(type));
|
throw Exception(ErrorCodes::LOGICAL_ERROR, "Cannot specify bcrypt password for authentication type {}", toString(type));
|
||||||
|
|
||||||
return setPasswordHashBinary(Util::encodeBcrypt(password_, workfactor_));
|
setPasswordHashBinary(Util::encodeBcrypt(password_, workfactor_));
|
||||||
}
|
}
|
||||||
|
|
||||||
String AuthenticationData::getPassword() const
|
String AuthenticationData::getPassword() const
|
||||||
|
@ -7,7 +7,7 @@ namespace DB
|
|||||||
{
|
{
|
||||||
|
|
||||||
/// Represents the type of an access entity (see the IAccessEntity class).
|
/// Represents the type of an access entity (see the IAccessEntity class).
|
||||||
enum class AccessEntityType
|
enum class AccessEntityType : uint8_t
|
||||||
{
|
{
|
||||||
USER,
|
USER,
|
||||||
ROLE,
|
ROLE,
|
||||||
|
@ -115,15 +115,15 @@ namespace
|
|||||||
{
|
{
|
||||||
UNKNOWN = -2,
|
UNKNOWN = -2,
|
||||||
GROUP = -1,
|
GROUP = -1,
|
||||||
GLOBAL,
|
GLOBAL = 0,
|
||||||
DATABASE,
|
DATABASE = 1,
|
||||||
TABLE,
|
TABLE = 2,
|
||||||
VIEW = TABLE,
|
VIEW = TABLE,
|
||||||
COLUMN,
|
COLUMN = 3,
|
||||||
DICTIONARY,
|
DICTIONARY = 4,
|
||||||
NAMED_COLLECTION,
|
NAMED_COLLECTION = 5,
|
||||||
USER_NAME,
|
USER_NAME = 6,
|
||||||
TABLE_ENGINE,
|
TABLE_ENGINE = 7,
|
||||||
};
|
};
|
||||||
|
|
||||||
struct Node;
|
struct Node;
|
||||||
|
@ -245,7 +245,7 @@ bool AccessRightsElements::sameOptions() const
|
|||||||
|
|
||||||
void AccessRightsElements::eraseNonGrantable()
|
void AccessRightsElements::eraseNonGrantable()
|
||||||
{
|
{
|
||||||
boost::range::remove_erase_if(*this, [](AccessRightsElement & element)
|
std::erase_if(*this, [](AccessRightsElement & element)
|
||||||
{
|
{
|
||||||
element.eraseNonGrantable();
|
element.eraseNonGrantable();
|
||||||
return element.empty();
|
return element.empty();
|
||||||
|
@ -7,7 +7,7 @@ namespace DB
|
|||||||
{
|
{
|
||||||
|
|
||||||
/// Represents an access type which can be granted on databases, tables, columns, etc.
|
/// Represents an access type which can be granted on databases, tables, columns, etc.
|
||||||
enum class AccessType
|
enum class AccessType : uint8_t
|
||||||
{
|
{
|
||||||
/// Macro M should be defined as M(name, aliases, node_type, parent_group_name)
|
/// Macro M should be defined as M(name, aliases, node_type, parent_group_name)
|
||||||
/// where name is identifier with underscores (instead of spaces);
|
/// where name is identifier with underscores (instead of spaces);
|
||||||
|
@ -308,7 +308,7 @@ void AllowedClientHosts::removeAddress(const IPAddress & address)
|
|||||||
if (address.isLoopback())
|
if (address.isLoopback())
|
||||||
local_host = false;
|
local_host = false;
|
||||||
else
|
else
|
||||||
boost::range::remove_erase(addresses, address);
|
std::erase(addresses, address);
|
||||||
}
|
}
|
||||||
|
|
||||||
void AllowedClientHosts::addSubnet(const IPSubnet & subnet)
|
void AllowedClientHosts::addSubnet(const IPSubnet & subnet)
|
||||||
@ -328,7 +328,7 @@ void AllowedClientHosts::removeSubnet(const IPSubnet & subnet)
|
|||||||
else if (subnet.isMaskAllBitsOne())
|
else if (subnet.isMaskAllBitsOne())
|
||||||
removeAddress(subnet.getPrefix());
|
removeAddress(subnet.getPrefix());
|
||||||
else
|
else
|
||||||
boost::range::remove_erase(subnets, subnet);
|
std::erase(subnets, subnet);
|
||||||
}
|
}
|
||||||
|
|
||||||
void AllowedClientHosts::addName(const String & name)
|
void AllowedClientHosts::addName(const String & name)
|
||||||
@ -344,7 +344,7 @@ void AllowedClientHosts::removeName(const String & name)
|
|||||||
if (boost::iequals(name, "localhost"))
|
if (boost::iequals(name, "localhost"))
|
||||||
local_host = false;
|
local_host = false;
|
||||||
else
|
else
|
||||||
boost::range::remove_erase(names, name);
|
std::erase(names, name);
|
||||||
}
|
}
|
||||||
|
|
||||||
void AllowedClientHosts::addNameRegexp(const String & name_regexp)
|
void AllowedClientHosts::addNameRegexp(const String & name_regexp)
|
||||||
@ -364,7 +364,7 @@ void AllowedClientHosts::removeNameRegexp(const String & name_regexp)
|
|||||||
else if (name_regexp == ".*")
|
else if (name_regexp == ".*")
|
||||||
any_host = false;
|
any_host = false;
|
||||||
else
|
else
|
||||||
boost::range::remove_erase(name_regexps, name_regexp);
|
std::erase(name_regexps, name_regexp);
|
||||||
}
|
}
|
||||||
|
|
||||||
void AllowedClientHosts::addLikePattern(const String & pattern)
|
void AllowedClientHosts::addLikePattern(const String & pattern)
|
||||||
@ -384,7 +384,7 @@ void AllowedClientHosts::removeLikePattern(const String & pattern)
|
|||||||
else if ((pattern == "%") || (pattern == "0.0.0.0/0") || (pattern == "::/0"))
|
else if ((pattern == "%") || (pattern == "0.0.0.0/0") || (pattern == "::/0"))
|
||||||
any_host = false;
|
any_host = false;
|
||||||
else
|
else
|
||||||
boost::range::remove_erase(like_patterns, pattern);
|
std::erase(like_patterns, pattern);
|
||||||
}
|
}
|
||||||
|
|
||||||
void AllowedClientHosts::addLocalHost()
|
void AllowedClientHosts::addLocalHost()
|
||||||
|
@ -6,7 +6,7 @@
|
|||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
|
|
||||||
enum class AuthenticationType
|
enum class AuthenticationType : uint8_t
|
||||||
{
|
{
|
||||||
/// User doesn't have to enter password.
|
/// User doesn't have to enter password.
|
||||||
NO_PASSWORD,
|
NO_PASSWORD,
|
||||||
|
@ -9,7 +9,7 @@ namespace DB
|
|||||||
using QuotaValue = UInt64;
|
using QuotaValue = UInt64;
|
||||||
|
|
||||||
/// Kinds of resource what we wish to quota.
|
/// Kinds of resource what we wish to quota.
|
||||||
enum class QuotaType
|
enum class QuotaType : uint8_t
|
||||||
{
|
{
|
||||||
QUERIES, /// Number of queries.
|
QUERIES, /// Number of queries.
|
||||||
QUERY_SELECTS, /// Number of select queries.
|
QUERY_SELECTS, /// Number of select queries.
|
||||||
@ -45,7 +45,7 @@ struct QuotaTypeInfo
|
|||||||
|
|
||||||
/// Key to share quota consumption.
|
/// Key to share quota consumption.
|
||||||
/// Users with the same key share the same amount of resource.
|
/// Users with the same key share the same amount of resource.
|
||||||
enum class QuotaKeyType
|
enum class QuotaKeyType : uint8_t
|
||||||
{
|
{
|
||||||
NONE, /// All users share the same quota.
|
NONE, /// All users share the same quota.
|
||||||
USER_NAME, /// Connections with the same user name share the same quota.
|
USER_NAME, /// Connections with the same user name share the same quota.
|
||||||
|
@ -25,7 +25,7 @@ struct RowPolicyName
|
|||||||
|
|
||||||
/// Types of the filters of row policies.
|
/// Types of the filters of row policies.
|
||||||
/// Currently only RowPolicyFilterType::SELECT is supported.
|
/// Currently only RowPolicyFilterType::SELECT is supported.
|
||||||
enum class RowPolicyFilterType
|
enum class RowPolicyFilterType : uint8_t
|
||||||
{
|
{
|
||||||
/// Filter is a SQL conditional expression used to figure out which rows should be visible
|
/// Filter is a SQL conditional expression used to figure out which rows should be visible
|
||||||
/// for user or available for modification. If the expression returns NULL or false for some rows
|
/// for user or available for modification. If the expression returns NULL or false for some rows
|
||||||
|
@ -71,7 +71,7 @@ namespace
|
|||||||
SCOPE_EXIT(
|
SCOPE_EXIT(
|
||||||
{
|
{
|
||||||
if (!succeeded)
|
if (!succeeded)
|
||||||
std::filesystem::remove(tmp_file_path);
|
(void)std::filesystem::remove(tmp_file_path);
|
||||||
});
|
});
|
||||||
|
|
||||||
/// Write the file.
|
/// Write the file.
|
||||||
@ -302,7 +302,7 @@ void DiskAccessStorage::writeLists()
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// The list files was successfully written, we don't need the 'need_rebuild_lists.mark' file any longer.
|
/// The list files was successfully written, we don't need the 'need_rebuild_lists.mark' file any longer.
|
||||||
std::filesystem::remove(getNeedRebuildListsMarkFilePath(directory_path));
|
(void)std::filesystem::remove(getNeedRebuildListsMarkFilePath(directory_path));
|
||||||
types_of_lists_to_write.clear();
|
types_of_lists_to_write.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -419,7 +419,7 @@ void DiskAccessStorage::removeAllExceptInMemory(const boost::container::flat_set
|
|||||||
const auto & id = it->first;
|
const auto & id = it->first;
|
||||||
++it; /// We must go to the next element in the map `entries_by_id` here because otherwise removeNoLock() can invalidate our iterator.
|
++it; /// We must go to the next element in the map `entries_by_id` here because otherwise removeNoLock() can invalidate our iterator.
|
||||||
if (!ids_to_keep.contains(id))
|
if (!ids_to_keep.contains(id))
|
||||||
removeNoLock(id, /* throw_if_not_exists */ true, /* write_on_disk= */ false);
|
(void)removeNoLock(id, /* throw_if_not_exists */ true, /* write_on_disk= */ false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -549,7 +549,7 @@ bool DiskAccessStorage::insertNoLock(const UUID & id, const AccessEntityPtr & ne
|
|||||||
if (name_collision && (id_by_name != id))
|
if (name_collision && (id_by_name != id))
|
||||||
{
|
{
|
||||||
assert(replace_if_exists);
|
assert(replace_if_exists);
|
||||||
removeNoLock(id_by_name, /* throw_if_not_exists= */ false, write_on_disk);
|
removeNoLock(id_by_name, /* throw_if_not_exists= */ false, write_on_disk); // NOLINT
|
||||||
}
|
}
|
||||||
|
|
||||||
if (id_collision)
|
if (id_collision)
|
||||||
@ -574,7 +574,7 @@ bool DiskAccessStorage::insertNoLock(const UUID & id, const AccessEntityPtr & ne
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
removeNoLock(id, /* throw_if_not_exists= */ false, write_on_disk);
|
removeNoLock(id, /* throw_if_not_exists= */ false, write_on_disk); // NOLINT
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Do insertion.
|
/// Do insertion.
|
||||||
|
@ -161,9 +161,9 @@ void GrantedRoles::makeUnion(const GrantedRoles & other)
|
|||||||
|
|
||||||
void GrantedRoles::makeIntersection(const GrantedRoles & other)
|
void GrantedRoles::makeIntersection(const GrantedRoles & other)
|
||||||
{
|
{
|
||||||
boost::range::remove_erase_if(roles, [&other](const UUID & id) { return other.roles.find(id) == other.roles.end(); });
|
boost::range::remove_erase_if(roles, [&other](const UUID & id) { return other.roles.find(id) == other.roles.end(); }); // NOLINT
|
||||||
|
|
||||||
boost::range::remove_erase_if(roles_with_admin_option, [&other](const UUID & id)
|
boost::range::remove_erase_if(roles_with_admin_option, [&other](const UUID & id) // NOLINT
|
||||||
{
|
{
|
||||||
return other.roles_with_admin_option.find(id) == other.roles_with_admin_option.end();
|
return other.roles_with_admin_option.find(id) == other.roles_with_admin_option.end();
|
||||||
});
|
});
|
||||||
|
@ -583,7 +583,7 @@ void IAccessStorage::backup(BackupEntriesCollector & backup_entries_collector, c
|
|||||||
throwBackupNotAllowed();
|
throwBackupNotAllowed();
|
||||||
|
|
||||||
auto entities = readAllWithIDs(type);
|
auto entities = readAllWithIDs(type);
|
||||||
boost::range::remove_erase_if(entities, [](const std::pair<UUID, AccessEntityPtr> & x) { return !x.second->isBackupAllowed(); });
|
std::erase_if(entities, [](const std::pair<UUID, AccessEntityPtr> & x) { return !x.second->isBackupAllowed(); });
|
||||||
|
|
||||||
if (entities.empty())
|
if (entities.empty())
|
||||||
return;
|
return;
|
||||||
|
@ -24,7 +24,7 @@ namespace DB
|
|||||||
struct User;
|
struct User;
|
||||||
class Credentials;
|
class Credentials;
|
||||||
class ExternalAuthenticators;
|
class ExternalAuthenticators;
|
||||||
enum class AuthenticationType;
|
enum class AuthenticationType : uint8_t;
|
||||||
class BackupEntriesCollector;
|
class BackupEntriesCollector;
|
||||||
class RestorerFromBackup;
|
class RestorerFromBackup;
|
||||||
|
|
||||||
|
@ -76,7 +76,7 @@ void LDAPAccessStorage::setConfiguration(const Poco::Util::AbstractConfiguration
|
|||||||
config.keys(prefix, all_keys);
|
config.keys(prefix, all_keys);
|
||||||
for (const auto & key : all_keys)
|
for (const auto & key : all_keys)
|
||||||
{
|
{
|
||||||
if (key == "role_mapping" || key.find("role_mapping[") == 0)
|
if (key == "role_mapping" || key.starts_with("role_mapping["))
|
||||||
parseLDAPRoleSearchParams(role_search_params_cfg.emplace_back(), config, prefix_str + key);
|
parseLDAPRoleSearchParams(role_search_params_cfg.emplace_back(), config, prefix_str + key);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -94,7 +94,7 @@ void LDAPAccessStorage::setConfiguration(const Poco::Util::AbstractConfiguration
|
|||||||
role_change_subscription = access_control.subscribeForChanges<Role>(
|
role_change_subscription = access_control.subscribeForChanges<Role>(
|
||||||
[this] (const UUID & id, const AccessEntityPtr & entity)
|
[this] (const UUID & id, const AccessEntityPtr & entity)
|
||||||
{
|
{
|
||||||
return this->processRoleChange(id, entity);
|
this->processRoleChange(id, entity);
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -200,7 +200,7 @@ void LDAPAccessStorage::applyRoleChangeNoLock(bool grant, const UUID & role_id,
|
|||||||
void LDAPAccessStorage::assignRolesNoLock(User & user, const LDAPClient::SearchResultsList & external_roles) const
|
void LDAPAccessStorage::assignRolesNoLock(User & user, const LDAPClient::SearchResultsList & external_roles) const
|
||||||
{
|
{
|
||||||
const auto external_roles_hash = boost::hash<LDAPClient::SearchResultsList>{}(external_roles);
|
const auto external_roles_hash = boost::hash<LDAPClient::SearchResultsList>{}(external_roles);
|
||||||
return assignRolesNoLock(user, external_roles, external_roles_hash);
|
assignRolesNoLock(user, external_roles, external_roles_hash);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -26,7 +26,7 @@ class LDAPClient
|
|||||||
public:
|
public:
|
||||||
struct SearchParams
|
struct SearchParams
|
||||||
{
|
{
|
||||||
enum class Scope
|
enum class Scope : uint8_t
|
||||||
{
|
{
|
||||||
BASE,
|
BASE,
|
||||||
ONE_LEVEL,
|
ONE_LEVEL,
|
||||||
@ -57,20 +57,20 @@ public:
|
|||||||
|
|
||||||
struct Params
|
struct Params
|
||||||
{
|
{
|
||||||
enum class ProtocolVersion
|
enum class ProtocolVersion : uint8_t
|
||||||
{
|
{
|
||||||
V2,
|
V2,
|
||||||
V3
|
V3
|
||||||
};
|
};
|
||||||
|
|
||||||
enum class TLSEnable
|
enum class TLSEnable : uint8_t
|
||||||
{
|
{
|
||||||
NO,
|
NO,
|
||||||
YES_STARTTLS,
|
YES_STARTTLS,
|
||||||
YES
|
YES
|
||||||
};
|
};
|
||||||
|
|
||||||
enum class TLSProtocolVersion
|
enum class TLSProtocolVersion : uint8_t
|
||||||
{
|
{
|
||||||
SSL2,
|
SSL2,
|
||||||
SSL3,
|
SSL3,
|
||||||
@ -79,7 +79,7 @@ public:
|
|||||||
TLS1_2
|
TLS1_2
|
||||||
};
|
};
|
||||||
|
|
||||||
enum class TLSRequireCert
|
enum class TLSRequireCert : uint8_t
|
||||||
{
|
{
|
||||||
NEVER,
|
NEVER,
|
||||||
ALLOW,
|
ALLOW,
|
||||||
@ -87,7 +87,7 @@ public:
|
|||||||
DEMAND
|
DEMAND
|
||||||
};
|
};
|
||||||
|
|
||||||
enum class SASLMechanism
|
enum class SASLMechanism : uint8_t
|
||||||
{
|
{
|
||||||
UNKNOWN,
|
UNKNOWN,
|
||||||
SIMPLE
|
SIMPLE
|
||||||
|
@ -106,7 +106,7 @@ bool MemoryAccessStorage::insertNoLock(const UUID & id, const AccessEntityPtr &
|
|||||||
if (name_collision && (id_by_name != id))
|
if (name_collision && (id_by_name != id))
|
||||||
{
|
{
|
||||||
assert(replace_if_exists);
|
assert(replace_if_exists);
|
||||||
removeNoLock(id_by_name, /* throw_if_not_exists= */ true);
|
removeNoLock(id_by_name, /* throw_if_not_exists= */ true); // NOLINT
|
||||||
}
|
}
|
||||||
|
|
||||||
if (id_collision)
|
if (id_collision)
|
||||||
@ -128,7 +128,7 @@ bool MemoryAccessStorage::insertNoLock(const UUID & id, const AccessEntityPtr &
|
|||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
removeNoLock(id, /* throw_if_not_exists= */ true);
|
removeNoLock(id, /* throw_if_not_exists= */ true); // NOLINT
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Do insertion.
|
/// Do insertion.
|
||||||
@ -238,7 +238,7 @@ void MemoryAccessStorage::removeAllExceptNoLock(const boost::container::flat_set
|
|||||||
const auto & id = it->first;
|
const auto & id = it->first;
|
||||||
++it; /// We must go to the next element in the map `entries_by_id` here because otherwise removeNoLock() can invalidate our iterator.
|
++it; /// We must go to the next element in the map `entries_by_id` here because otherwise removeNoLock() can invalidate our iterator.
|
||||||
if (!ids_to_keep.contains(id))
|
if (!ids_to_keep.contains(id))
|
||||||
removeNoLock(id, /* throw_if_not_exists */ true);
|
removeNoLock(id, /* throw_if_not_exists */ true); // NOLINT
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user