diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 82a16d0589f..6540b60476f 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -13,13 +13,4 @@ Changelog entry (a user-readable short description of the changes that goes to C ... -Detailed description / Documentation draft: -... - - -> By adding documentation, you'll allow users to try your new feature immediately, not when someone else will have time to document it later. Documentation is necessary for all features that affect user experience in any way. You can add brief documentation draft above, or add documentation right into your patch as Markdown files in [docs](https://github.com/ClickHouse/ClickHouse/tree/master/docs) folder. - -> If you are doing this for the first time, it's recommended to read the lightweight [Contributing to ClickHouse Documentation](https://github.com/ClickHouse/ClickHouse/tree/master/docs/README.md) guide first. - - > Information about CI checks: https://clickhouse.tech/docs/en/development/continuous-integration/ diff --git a/.github/actionlint.yml b/.github/actionlint.yml index 8083186117f..0f88f30d42c 100644 --- a/.github/actionlint.yml +++ b/.github/actionlint.yml @@ -1,8 +1,9 @@ self-hosted-runner: labels: - builder + - func-tester + - func-tester-aarch64 - fuzzer-unit-tester - stress-tester - style-checker - - func-tester-aarch64 - - func-tester + - style-checker-aarch64 diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 749c248af82..75f8a63368d 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -10,7 +10,7 @@ on: # yamllint disable-line rule:truthy - 'backport/**' jobs: DockerHubPushAarch64: - runs-on: [self-hosted, func-tester-aarch64] + runs-on: [self-hosted, style-checker-aarch64] steps: - name: Clear repository run: | diff --git a/.github/workflows/docs_check.yml b/.github/workflows/docs_check.yml index 633e654d656..d5b56bfef32 100644 --- a/.github/workflows/docs_check.yml +++ b/.github/workflows/docs_check.yml @@ -30,7 +30,7 @@ jobs: python3 run_check.py DockerHubPushAarch64: needs: CheckLabels - runs-on: [self-hosted, func-tester-aarch64] + runs-on: [self-hosted, style-checker-aarch64] steps: - name: Clear repository run: | diff --git a/.github/workflows/docs_release.yml b/.github/workflows/docs_release.yml new file mode 100644 index 00000000000..66838a05552 --- /dev/null +++ b/.github/workflows/docs_release.yml @@ -0,0 +1,121 @@ +name: DocsReleaseChecks + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + +concurrency: + group: master-release + cancel-in-progress: true +on: # yamllint disable-line rule:truthy + push: + branches: + - master + paths: + - 'docs/**' + - 'website/**' + - 'benchmark/**' + - 'docker/**' + - '.github/**' + workflow_dispatch: +jobs: + DockerHubPushAarch64: + runs-on: [self-hosted, style-checker-aarch64] + steps: + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Images check + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 docker_images_check.py --suffix aarch64 + - name: Upload images files to artifacts + uses: actions/upload-artifact@v2 + with: + name: changed_images_aarch64 + path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json + DockerHubPushAmd64: + runs-on: [self-hosted, style-checker] + steps: + - name: Clear 
repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Images check + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 docker_images_check.py --suffix amd64 + - name: Upload images files to artifacts + uses: actions/upload-artifact@v2 + with: + name: changed_images_amd64 + path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json + DockerHubPush: + needs: [DockerHubPushAmd64, DockerHubPushAarch64] + runs-on: [self-hosted, style-checker] + steps: + - name: Clear repository + run: | + sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" + - name: Check out repository code + uses: actions/checkout@v2 + - name: Download changed aarch64 images + uses: actions/download-artifact@v2 + with: + name: changed_images_aarch64 + path: ${{ runner.temp }} + - name: Download changed amd64 images + uses: actions/download-artifact@v2 + with: + name: changed_images_amd64 + path: ${{ runner.temp }} + - name: Images check + run: | + cd "$GITHUB_WORKSPACE/tests/ci" + python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 + - name: Upload images files to artifacts + uses: actions/upload-artifact@v2 + with: + name: changed_images + path: ${{ runner.temp }}/changed_images.json + DocsRelease: + needs: DockerHubPush + runs-on: [self-hosted, func-tester] + steps: + - name: Set envs + # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings + run: | + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/docs_release + REPO_COPY=${{runner.temp}}/docs_release/ClickHouse + CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}} + ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/docs_release - REPO_COPY=${{runner.temp}}/docs_release/ClickHouse - CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}} - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' + JFROG_API_KEY=${{ secrets.JFROG_KEY_API_PACKAGES }} + TEMP_PATH=${{runner.temp}}/release_packages + REPO_COPY=${{runner.temp}}/release_packages/ClickHouse + EOF + - name: Check out repository code + uses: actions/checkout@v2 + - name: Download packages and push to Artifactory + env: + run: | + rm -rf "$TEMP_PATH" && mkdir -p "$REPO_COPY" + cp -r "$GITHUB_WORKSPACE" "$REPO_COPY" + cd "$REPO_COPY" + python3 ./tests/ci/push_to_artifactory.py --release "${{ github.ref }}" \ + --commit '${{ github.sha }}' --all + - name: Upload packages to release assets + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: ${{runner.temp}}/release_packages/* + overwrite: true + tag: ${{ github.ref }} + file_glob: true diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 4ab2638069c..d916699acc2 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -13,7 +13,7 @@ on: # yamllint disable-line rule:truthy jobs: DockerHubPushAarch64: - runs-on: [self-hosted, func-tester-aarch64] + runs-on: [self-hosted, style-checker-aarch64] steps: - name: Clear repository run: | diff --git a/CHANGELOG.md b/CHANGELOG.md index 87860deea9d..1e4ea95c08c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ * A tool for collecting diagnostics data if you need support. [#33175](https://github.com/ClickHouse/ClickHouse/pull/33175) ([Alexander Burmak](https://github.com/Alex-Burmak)). * Automatic cluster discovery via Zoo/Keeper. 
It allows to add replicas to the cluster without changing configuration on every server. [#31442](https://github.com/ClickHouse/ClickHouse/pull/31442) ([vdimir](https://github.com/vdimir)). * Implement hive table engine to access apache hive from clickhouse. This implements: [#29245](https://github.com/ClickHouse/ClickHouse/issues/29245). [#31104](https://github.com/ClickHouse/ClickHouse/pull/31104) ([taiyang-li](https://github.com/taiyang-li)). -* Add aggregate functions `cramersV`, `cramersVBiasCorrected`, `theilsU` and `contingency`. These functions calculate dependency (measure of association) between categorical values. All these functions are using cross-tab (histogram on pairs) for implementation. You can imagine it like a correlation coefficient but for any discrete values (not necessary numbers). [#33366](https://github.com/ClickHouse/ClickHouse/pull/33366) ([alexey-milovidov](https://github.com/alexey-milovidov)). Initial implementation by TODO +* Add aggregate functions `cramersV`, `cramersVBiasCorrected`, `theilsU` and `contingency`. These functions calculate dependency (measure of association) between categorical values. All these functions are using cross-tab (histogram on pairs) for implementation. You can imagine it like a correlation coefficient but for any discrete values (not necessary numbers). [#33366](https://github.com/ClickHouse/ClickHouse/pull/33366) ([alexey-milovidov](https://github.com/alexey-milovidov)). Initial implementation by [Vanyok-All-is-OK](https://github.com/Vanyok-All-is-OK) and [antikvist](https://github.com/antikvist). * Added table function `hdfsCluster` which allows processing files from HDFS in parallel from many nodes in a specified cluster, similarly to `s3Cluster`. [#32400](https://github.com/ClickHouse/ClickHouse/pull/32400) ([Zhichang Yu](https://github.com/yuzhichang)). * Adding support for disks backed by Azure Blob Storage, in a similar way it has been done for disks backed by AWS S3. [#31505](https://github.com/ClickHouse/ClickHouse/pull/31505) ([Jakub Kuklis](https://github.com/jkuklis)). * Allow `COMMENT` in `CREATE VIEW` (for all VIEW kinds). [#31062](https://github.com/ClickHouse/ClickHouse/pull/31062) ([Vasily Nemkov](https://github.com/Enmk)). @@ -23,7 +23,6 @@ * Added function `arrayLast`. Closes [#33390](https://github.com/ClickHouse/ClickHouse/issues/33390). [#33415](https://github.com/ClickHouse/ClickHouse/pull/33415) Added function `arrayLastIndex`. [#33465](https://github.com/ClickHouse/ClickHouse/pull/33465) ([Maksim Kita](https://github.com/kitaisreal)). * Add function `decodeURLFormComponent` slightly different to `decodeURLComponent`. Close [#10298](https://github.com/ClickHouse/ClickHouse/issues/10298). [#33451](https://github.com/ClickHouse/ClickHouse/pull/33451) ([SuperDJY](https://github.com/cmsxbc)). * Allow to split `GraphiteMergeTree` rollup rules for plain/tagged metrics (optional rule_type field). [#33494](https://github.com/ClickHouse/ClickHouse/pull/33494) ([Michail Safronov](https://github.com/msaf1980)). -* Potential issue, cannot be exploited: integer overflow may happen in array resize. [#33024](https://github.com/ClickHouse/ClickHouse/pull/33024) ([varadarajkumar](https://github.com/varadarajkumar)). #### Performance Improvement @@ -78,7 +77,6 @@ * Validate config keys for external dictionaries. [#33095](https://github.com/ClickHouse/ClickHouse/issues/33095#issuecomment-1000577517). [#33130](https://github.com/ClickHouse/ClickHouse/pull/33130) ([Kseniia Sumarokova](https://github.com/kssenii)). 
* Send profile info inside `clickhouse-local`. Closes [#33093](https://github.com/ClickHouse/ClickHouse/issues/33093). [#33097](https://github.com/ClickHouse/ClickHouse/pull/33097) ([Kseniia Sumarokova](https://github.com/kssenii)). * Short circuit evaluation: support for function `throwIf`. Closes [#32969](https://github.com/ClickHouse/ClickHouse/issues/32969). [#32973](https://github.com/ClickHouse/ClickHouse/pull/32973) ([Maksim Kita](https://github.com/kitaisreal)). -* Added `Date32` date type support in dictionaries. Closes [#32913](https://github.com/ClickHouse/ClickHouse/issues/32913). [#32971](https://github.com/ClickHouse/ClickHouse/pull/32971) ([Maksim Kita](https://github.com/kitaisreal)). * (This only happens in unofficial builds). Fixed segfault when inserting data into compressed Decimal, String, FixedString and Array columns. This closes [#32939](https://github.com/ClickHouse/ClickHouse/issues/32939). [#32940](https://github.com/ClickHouse/ClickHouse/pull/32940) ([N. Kolotov](https://github.com/nkolotov)). * Added support for specifying subquery as SQL user defined function. Example: `CREATE FUNCTION test AS () -> (SELECT 1)`. Closes [#30755](https://github.com/ClickHouse/ClickHouse/issues/30755). [#32758](https://github.com/ClickHouse/ClickHouse/pull/32758) ([Maksim Kita](https://github.com/kitaisreal)). * Improve gRPC compression support for [#28671](https://github.com/ClickHouse/ClickHouse/issues/28671). [#32747](https://github.com/ClickHouse/ClickHouse/pull/32747) ([Vitaly Baranov](https://github.com/vitlibar)). @@ -100,6 +98,7 @@ * Use `--database` option for clickhouse-local. [#32797](https://github.com/ClickHouse/ClickHouse/pull/32797) ([Kseniia Sumarokova](https://github.com/kssenii)). * Fix surprisingly bad code in SQL ordinary function `file`. Now it supports symlinks. [#32640](https://github.com/ClickHouse/ClickHouse/pull/32640) ([alexey-milovidov](https://github.com/alexey-milovidov)). * Updating `modification_time` for data part in `system.parts` after part movement [#32964](https://github.com/ClickHouse/ClickHouse/issues/32964). [#32965](https://github.com/ClickHouse/ClickHouse/pull/32965) ([save-my-heart](https://github.com/save-my-heart)). +* Potential issue, cannot be exploited: integer overflow may happen in array resize. [#33024](https://github.com/ClickHouse/ClickHouse/pull/33024) ([varadarajkumar](https://github.com/varadarajkumar)). #### Build/Testing/Packaging Improvement diff --git a/CMakeLists.txt b/CMakeLists.txt index 4c9a972a4e4..b11ea650dc7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -104,9 +104,8 @@ message (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}") string (TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UC) option(USE_STATIC_LIBRARIES "Disable to use shared libraries" ON) -option(MAKE_STATIC_LIBRARIES "Disable to make shared libraries" ${USE_STATIC_LIBRARIES}) -if (NOT MAKE_STATIC_LIBRARIES) +if (NOT USE_STATIC_LIBRARIES) # DEVELOPER ONLY. # Faster linking if turned on. option(SPLIT_SHARED_LIBRARIES "Keep all internal libraries as separate .so files") @@ -115,11 +114,11 @@ if (NOT MAKE_STATIC_LIBRARIES) "Make several binaries (clickhouse-server, clickhouse-client etc.) 
instead of one bundled") endif () -if (MAKE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES) - message(FATAL_ERROR "Defining SPLIT_SHARED_LIBRARIES=1 without MAKE_STATIC_LIBRARIES=0 has no effect.") +if (USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES) + message(FATAL_ERROR "Defining SPLIT_SHARED_LIBRARIES=1 without USE_STATIC_LIBRARIES=0 has no effect.") endif() -if (NOT MAKE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES) +if (NOT USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES) set(BUILD_SHARED_LIBS 1 CACHE INTERNAL "") endif () @@ -139,7 +138,6 @@ if (ENABLE_FUZZING) set (ENABLE_CLICKHOUSE_ODBC_BRIDGE OFF) set (ENABLE_LIBRARIES 0) set (ENABLE_SSL 1) - set (USE_INTERNAL_SSL_LIBRARY 1) set (USE_UNWIND ON) set (ENABLE_EMBEDDED_COMPILER 0) set (ENABLE_EXAMPLES 0) @@ -152,7 +150,6 @@ if (ENABLE_FUZZING) # For codegen_select_fuzzer set (ENABLE_PROTOBUF 1) - set (USE_INTERNAL_PROTOBUF_LIBRARY 1) endif() # Global libraries @@ -203,21 +200,13 @@ endif () option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON) option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF) -if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL) +if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND USE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL) # Only for Linux, x86_64 or aarch64. option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON) elseif(GLIBC_COMPATIBILITY) message (${RECONFIGURE_MESSAGE_LEVEL} "Glibc compatibility cannot be enabled in current configuration") endif () -if (GLIBC_COMPATIBILITY) - # NOTE: we may also want to check glibc version and add -include only for 2.32+ - # however this is extra complexity, especially for cross compiling. - # And anyway it should not break anything for <2.32. 
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -include ${CMAKE_CURRENT_SOURCE_DIR}/base/glibc-compatibility/glibc-compat-2.32.h") - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -include ${CMAKE_CURRENT_SOURCE_DIR}/base/glibc-compatibility/glibc-compat-2.32.h") -endif() - # Make sure the final executable has symbols exported set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic") @@ -446,7 +435,7 @@ endif () set (CMAKE_POSTFIX_VARIABLE "CMAKE_${CMAKE_BUILD_TYPE_UC}_POSTFIX") -if (MAKE_STATIC_LIBRARIES) +if (USE_STATIC_LIBRARIES) set (CMAKE_POSITION_INDEPENDENT_CODE OFF) if (OS_LINUX AND NOT ARCH_ARM) # Slightly more efficient code can be generated @@ -482,87 +471,10 @@ endif () message (STATUS "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ; USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES} - MAKE_STATIC_LIBRARIES=${MAKE_STATIC_LIBRARIES} SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES} CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}") include (GNUInstallDirs) -include (cmake/contrib_finder.cmake) - -find_contrib_lib(double-conversion) # Must be before parquet -include (cmake/find/ssl.cmake) -include (cmake/find/ldap.cmake) # after ssl -include (cmake/find/icu.cmake) -include (cmake/find/xz.cmake) -include (cmake/find/zlib.cmake) -include (cmake/find/zstd.cmake) -include (cmake/find/ltdl.cmake) # for odbc -# openssl, zlib before poco -include (cmake/find/sparsehash.cmake) -include (cmake/find/re2.cmake) -include (cmake/find/krb5.cmake) -include (cmake/find/libgsasl.cmake) -include (cmake/find/cyrus-sasl.cmake) -include (cmake/find/rdkafka.cmake) -include (cmake/find/libuv.cmake) # for amqpcpp and cassandra -include (cmake/find/amqpcpp.cmake) -include (cmake/find/capnp.cmake) -include (cmake/find/llvm.cmake) -include (cmake/find/h3.cmake) -include (cmake/find/libxml2.cmake) -include (cmake/find/brotli.cmake) -include (cmake/find/protobuf.cmake) -include (cmake/find/thrift.cmake) -include (cmake/find/grpc.cmake) -include (cmake/find/pdqsort.cmake) -include (cmake/find/miniselect.cmake) -include (cmake/find/hdfs3.cmake) # uses protobuf -include (cmake/find/poco.cmake) -include (cmake/find/curl.cmake) -include (cmake/find/s3.cmake) -include (cmake/find/blob_storage.cmake) -include (cmake/find/base64.cmake) -include (cmake/find/parquet.cmake) # uses protobuf and thrift -include (cmake/find/simdjson.cmake) -include (cmake/find/fast_float.cmake) -include (cmake/find/rapidjson.cmake) -include (cmake/find/fastops.cmake) -include (cmake/find/odbc.cmake) -include (cmake/find/nanodbc.cmake) -include (cmake/find/sqlite.cmake) -include (cmake/find/rocksdb.cmake) -include (cmake/find/libpqxx.cmake) -include (cmake/find/nuraft.cmake) -include (cmake/find/yaml-cpp.cmake) -include (cmake/find/s2geometry.cmake) -include (cmake/find/nlp.cmake) -include (cmake/find/bzip2.cmake) -include (cmake/find/filelog.cmake) - -if(NOT USE_INTERNAL_PARQUET_LIBRARY) - set (ENABLE_ORC OFF CACHE INTERNAL "") -endif() -include (cmake/find/orc.cmake) - -include (cmake/find/avro.cmake) -include (cmake/find/msgpack.cmake) -include (cmake/find/cassandra.cmake) -include (cmake/find/sentry.cmake) -include (cmake/find/datasketches.cmake) -include (cmake/find/libprotobuf-mutator.cmake) -include (cmake/find/hive-metastore.cmake) - -set (USE_INTERNAL_CITYHASH_LIBRARY ON CACHE INTERNAL "") -find_contrib_lib(cityhash) - -find_contrib_lib(farmhash) - -if (ENABLE_TESTS) - include (cmake/find/gtest.cmake) -endif () - -# Need to process before "contrib" dir: -include (cmake/find/mysqlclient.cmake) # When testing for memory 
leaks with Valgrind, don't link tcmalloc or jemalloc. @@ -606,7 +518,7 @@ macro (add_executable target) # - _je_zone_register due to JEMALLOC_PRIVATE_NAMESPACE=je_ under OS X. # - but jemalloc-cmake does not run private_namespace.sh # so symbol name should be _zone_register - if (ENABLE_JEMALLOC AND MAKE_STATIC_LIBRARIES AND OS_DARWIN) + if (ENABLE_JEMALLOC AND USE_STATIC_LIBRARIES AND OS_DARWIN) set_property(TARGET ${target} APPEND PROPERTY LINK_OPTIONS -u_zone_register) endif() endif() @@ -625,6 +537,4 @@ add_subdirectory (programs) add_subdirectory (tests) add_subdirectory (utils) -include (cmake/print_include_directories.cmake) - include (cmake/sanitize_target_link_libraries.cmake) diff --git a/README.md b/README.md index e12238577a7..f433b457861 100644 --- a/README.md +++ b/README.md @@ -10,5 +10,6 @@ ClickHouse® is an open-source column-oriented database management system that a * [YouTube channel](https://www.youtube.com/c/ClickHouseDB) has a lot of content about ClickHouse in video format. * [Slack](https://join.slack.com/t/clickhousedb/shared_invite/zt-rxm3rdrk-lIUmhLC3V8WTaL0TGxsOmg) and [Telegram](https://telegram.me/clickhouse_en) allow chatting with ClickHouse users in real-time. * [Blog](https://clickhouse.com/blog/en/) contains various ClickHouse-related articles, as well as announcements and reports about events. -* [Code Browser](https://clickhouse.com/codebrowser/html_report/ClickHouse/index.html) with syntax highlight and navigation. +* [Code Browser (Woboq)](https://clickhouse.com/codebrowser/html_report/ClickHouse/index.html) with syntax highlight and navigation. +* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlight, powered by github.dev. * [Contacts](https://clickhouse.com/company/#contact) can help to get your questions answered if there are any. diff --git a/base/base/CMakeLists.txt b/base/base/CMakeLists.txt index bc82e502e79..3e6f174c6dc 100644 --- a/base/base/CMakeLists.txt +++ b/base/base/CMakeLists.txt @@ -24,7 +24,7 @@ if (ENABLE_REPLXX) endif () if (USE_DEBUG_HELPERS) - get_target_property(MAGIC_ENUM_INCLUDE_DIR magic_enum INTERFACE_INCLUDE_DIRECTORIES) + get_target_property(MAGIC_ENUM_INCLUDE_DIR ch_contrib::magic_enum INTERFACE_INCLUDE_DIRECTORIES) # CMake generator expression will do insane quoting when it encounters special character like quotes, spaces, etc. # Prefixing "SHELL:" will force it to use the original text. set (INCLUDE_DEBUG_HELPERS "SHELL:-I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"") @@ -40,29 +40,25 @@ else () target_compile_definitions(common PUBLIC WITH_COVERAGE=0) endif () -if (USE_INTERNAL_CCTZ) - set_source_files_properties(DateLUTImpl.cpp PROPERTIES COMPILE_DEFINITIONS USE_INTERNAL_CCTZ) -endif() - target_include_directories(common PUBLIC .. 
"${CMAKE_CURRENT_BINARY_DIR}/..") -if (OS_DARWIN AND NOT MAKE_STATIC_LIBRARIES) +if (OS_DARWIN AND NOT USE_STATIC_LIBRARIES) target_link_libraries(common PUBLIC -Wl,-U,_inside_main) endif() target_link_libraries (common PUBLIC - ${CITYHASH_LIBRARIES} + ch_contrib::cityhash boost::headers_only boost::system Poco::Net Poco::Net::SSL Poco::Util Poco::Foundation - replxx - cctz - fmt - magic_enum + ch_contrib::replxx + ch_contrib::cctz + ch_contrib::fmt + ch_contrib::magic_enum ) if (ENABLE_TESTS) diff --git a/base/bridge/IBridge.cpp b/base/bridge/IBridge.cpp index 553973b645d..4c808278ed0 100644 --- a/base/bridge/IBridge.cpp +++ b/base/bridge/IBridge.cpp @@ -9,6 +9,7 @@ #include #include +#include #include #include #include diff --git a/base/daemon/CMakeLists.txt b/base/daemon/CMakeLists.txt index 6ef87db6a61..ae8f51cabd3 100644 --- a/base/daemon/CMakeLists.txt +++ b/base/daemon/CMakeLists.txt @@ -6,12 +6,12 @@ add_library (daemon target_include_directories (daemon PUBLIC ..) -if (OS_DARWIN AND NOT MAKE_STATIC_LIBRARIES) +if (OS_DARWIN AND NOT USE_STATIC_LIBRARIES) target_link_libraries (daemon PUBLIC -Wl,-undefined,dynamic_lookup) endif() target_link_libraries (daemon PUBLIC loggers PRIVATE clickhouse_common_io clickhouse_common_config common ${EXECINFO_LIBRARIES}) -if (USE_SENTRY) - target_link_libraries (daemon PRIVATE ${SENTRY_LIBRARY}) +if (TARGET ch_contrib::sentry) + target_link_libraries (daemon PRIVATE ch_contrib::sentry) endif () diff --git a/base/glibc-compatibility/CMakeLists.txt b/base/glibc-compatibility/CMakeLists.txt index 4fc2a002cd8..ddec09121e1 100644 --- a/base/glibc-compatibility/CMakeLists.txt +++ b/base/glibc-compatibility/CMakeLists.txt @@ -37,7 +37,7 @@ if (GLIBC_COMPATIBILITY) target_include_directories(glibc-compatibility PRIVATE libcxxabi ${musl_arch_include_dir}) - if (NOT USE_STATIC_LIBRARIES AND NOT MAKE_STATIC_LIBRARIES) + if (NOT USE_STATIC_LIBRARIES AND NOT USE_STATIC_LIBRARIES) target_compile_options(glibc-compatibility PRIVATE -fPIC) endif () diff --git a/base/glibc-compatibility/glibc-compat-2.32.h b/base/glibc-compatibility/glibc-compat-2.32.h deleted file mode 100644 index 53ed34d60fa..00000000000 --- a/base/glibc-compatibility/glibc-compat-2.32.h +++ /dev/null @@ -1,50 +0,0 @@ -/// In glibc 2.32 new version of some symbols had been added [1]: -/// -/// $ nm -D clickhouse | fgrep -e @GLIBC_2.32 -/// U pthread_getattr_np@GLIBC_2.32 -/// U pthread_sigmask@GLIBC_2.32 -/// -/// [1]: https://www.spinics.net/lists/fedora-devel/msg273044.html -/// -/// Right now ubuntu 20.04 is used as official image for building -/// ClickHouse, however once it will be switched someone may not be happy -/// with that fact that he/she cannot use official binaries anymore because -/// they have glibc < 2.32. -/// -/// To avoid this dependency, let's force previous version of those -/// symbols from glibc. 
-/// -/// Also note, that the following approach had been tested: -/// a) -Wl,--wrap -- but it goes into endless recursion whey you try to do -/// something like this: -/// -/// int __pthread_getattr_np_compact(pthread_t thread, pthread_attr_t *attr); -/// GLIBC_COMPAT_SYMBOL(__pthread_getattr_np_compact, pthread_getattr_np) -/// int __pthread_getattr_np_compact(pthread_t thread, pthread_attr_t *attr); -/// int __wrap_pthread_getattr_np(pthread_t thread, pthread_attr_t *attr) -/// { -/// return __pthread_getattr_np_compact(thread, attr); -/// } -/// -/// int __pthread_sigmask_compact(int how, const sigset_t *set, sigset_t *oldset); -/// GLIBC_COMPAT_SYMBOL(__pthread_sigmask_compact, pthread_sigmask) -/// int __pthread_sigmask_compact(int how, const sigset_t *set, sigset_t *oldset); -/// int __wrap_pthread_sigmask(int how, const sigset_t *set, sigset_t *oldset) -/// { -/// return __pthread_sigmask_compact(how, set, oldset); -/// } -/// -/// b) -Wl,--defsym -- same problems (and you cannot use version of symbol with -/// version in the expression) -/// c) this approach -- simply add this file with -include directive. - -#if defined(__amd64__) -#define GLIBC_COMPAT_SYMBOL(func) __asm__(".symver " #func "," #func "@GLIBC_2.2.5"); -#elif defined(__aarch64__) -#define GLIBC_COMPAT_SYMBOL(func) __asm__(".symver " #func "," #func "@GLIBC_2.17"); -#else -#error Your platform is not supported. -#endif - -GLIBC_COMPAT_SYMBOL(pthread_sigmask) -GLIBC_COMPAT_SYMBOL(pthread_getattr_np) diff --git a/cmake/Modules/FindArrow.cmake b/cmake/Modules/FindArrow.cmake deleted file mode 100644 index 5bd111de1e3..00000000000 --- a/cmake/Modules/FindArrow.cmake +++ /dev/null @@ -1,433 +0,0 @@ -# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindArrow.cmake - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# - Find Arrow (arrow/api.h, libarrow.a, libarrow.so) -# This module defines -# ARROW_FOUND, whether Arrow has been found -# ARROW_FULL_SO_VERSION, full shared object version of found Arrow "100.0.0" -# ARROW_IMPORT_LIB, path to libarrow's import library (Windows only) -# ARROW_INCLUDE_DIR, directory containing headers -# ARROW_LIBS, deprecated. Use ARROW_LIB_DIR instead -# ARROW_LIB_DIR, directory containing Arrow libraries -# ARROW_SHARED_IMP_LIB, deprecated. 
Use ARROW_IMPORT_LIB instead -# ARROW_SHARED_LIB, path to libarrow's shared library -# ARROW_SO_VERSION, shared object version of found Arrow such as "100" -# ARROW_STATIC_LIB, path to libarrow.a -# ARROW_VERSION, version of found Arrow -# ARROW_VERSION_MAJOR, major version of found Arrow -# ARROW_VERSION_MINOR, minor version of found Arrow -# ARROW_VERSION_PATCH, patch version of found Arrow - -if(DEFINED ARROW_FOUND) - return() -endif() - -include(FindPkgConfig) -include(FindPackageHandleStandardArgs) - -set(ARROW_SEARCH_LIB_PATH_SUFFIXES) -if(CMAKE_LIBRARY_ARCHITECTURE) - list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES "lib/${CMAKE_LIBRARY_ARCHITECTURE}") -endif() -list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES - "lib64" - "lib32" - "lib" - "bin") -set(ARROW_CONFIG_SUFFIXES - "_RELEASE" - "_RELWITHDEBINFO" - "_MINSIZEREL" - "_DEBUG" - "") -if(CMAKE_BUILD_TYPE) - string(TOUPPER ${CMAKE_BUILD_TYPE} ARROW_CONFIG_SUFFIX_PREFERRED) - set(ARROW_CONFIG_SUFFIX_PREFERRED "_${ARROW_CONFIG_SUFFIX_PREFERRED}") - list(INSERT ARROW_CONFIG_SUFFIXES 0 "${ARROW_CONFIG_SUFFIX_PREFERRED}") -endif() - -if(NOT DEFINED ARROW_MSVC_STATIC_LIB_SUFFIX) - if(MSVC) - set(ARROW_MSVC_STATIC_LIB_SUFFIX "_static") - else() - set(ARROW_MSVC_STATIC_LIB_SUFFIX "") - endif() -endif() - -# Internal function. -# -# Set shared library name for ${base_name} to ${output_variable}. -# -# Example: -# arrow_build_shared_library_name(ARROW_SHARED_LIBRARY_NAME arrow) -# # -> ARROW_SHARED_LIBRARY_NAME=libarrow.so on Linux -# # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dylib on macOS -# # -> ARROW_SHARED_LIBRARY_NAME=arrow.dll with MSVC on Windows -# # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dll with MinGW on Windows -function(arrow_build_shared_library_name output_variable base_name) - set(${output_variable} - "${CMAKE_SHARED_LIBRARY_PREFIX}${base_name}${CMAKE_SHARED_LIBRARY_SUFFIX}" - PARENT_SCOPE) -endfunction() - -# Internal function. -# -# Set import library name for ${base_name} to ${output_variable}. -# This is useful only for MSVC build. Import library is used only -# with MSVC build. -# -# Example: -# arrow_build_import_library_name(ARROW_IMPORT_LIBRARY_NAME arrow) -# # -> ARROW_IMPORT_LIBRARY_NAME=arrow on Linux (meaningless) -# # -> ARROW_IMPORT_LIBRARY_NAME=arrow on macOS (meaningless) -# # -> ARROW_IMPORT_LIBRARY_NAME=arrow.lib with MSVC on Windows -# # -> ARROW_IMPORT_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows -function(arrow_build_import_library_name output_variable base_name) - set(${output_variable} - "${CMAKE_IMPORT_LIBRARY_PREFIX}${base_name}${CMAKE_IMPORT_LIBRARY_SUFFIX}" - PARENT_SCOPE) -endfunction() - -# Internal function. -# -# Set static library name for ${base_name} to ${output_variable}. -# -# Example: -# arrow_build_static_library_name(ARROW_STATIC_LIBRARY_NAME arrow) -# # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on Linux -# # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on macOS -# # -> ARROW_STATIC_LIBRARY_NAME=arrow.lib with MSVC on Windows -# # -> ARROW_STATIC_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows -function(arrow_build_static_library_name output_variable base_name) - set( - ${output_variable} - "${CMAKE_STATIC_LIBRARY_PREFIX}${base_name}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}" - PARENT_SCOPE) -endfunction() - -# Internal function. -# -# Set macro value for ${macro_name} in ${header_content} to ${output_variable}. 
-# -# Example: -# arrow_extract_macro_value(version_major -# "ARROW_VERSION_MAJOR" -# "#define ARROW_VERSION_MAJOR 1.0.0") -# # -> version_major=1.0.0 -function(arrow_extract_macro_value output_variable macro_name header_content) - string(REGEX MATCH "#define +${macro_name} +[^\r\n]+" macro_definition - "${header_content}") - string(REGEX - REPLACE "^#define +${macro_name} +(.+)$" "\\1" macro_value "${macro_definition}") - set(${output_variable} "${macro_value}" PARENT_SCOPE) -endfunction() - -# Internal macro only for arrow_find_package. -# -# Find package in HOME. -macro(arrow_find_package_home) - find_path(${prefix}_include_dir "${header_path}" - PATHS "${home}" - PATH_SUFFIXES "include" - NO_DEFAULT_PATH) - set(include_dir "${${prefix}_include_dir}") - set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE) - - if(MSVC) - set(CMAKE_SHARED_LIBRARY_SUFFIXES_ORIGINAL ${CMAKE_FIND_LIBRARY_SUFFIXES}) - # .dll isn't found by find_library with MSVC because .dll isn't included in - # CMAKE_FIND_LIBRARY_SUFFIXES. - list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${CMAKE_SHARED_LIBRARY_SUFFIX}") - endif() - find_library(${prefix}_shared_lib - NAMES "${shared_lib_name}" - PATHS "${home}" - PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES} - NO_DEFAULT_PATH) - if(MSVC) - set(CMAKE_SHARED_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_ORIGINAL}) - endif() - set(shared_lib "${${prefix}_shared_lib}") - set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE) - if(shared_lib) - add_library(${target_shared} SHARED IMPORTED) - set_target_properties(${target_shared} PROPERTIES IMPORTED_LOCATION "${shared_lib}") - if(include_dir) - set_target_properties(${target_shared} - PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}") - endif() - find_library(${prefix}_import_lib - NAMES "${import_lib_name}" - PATHS "${home}" - PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES} - NO_DEFAULT_PATH) - set(import_lib "${${prefix}_import_lib}") - set(${prefix}_IMPORT_LIB "${import_lib}" PARENT_SCOPE) - if(import_lib) - set_target_properties(${target_shared} PROPERTIES IMPORTED_IMPLIB "${import_lib}") - endif() - endif() - - find_library(${prefix}_static_lib - NAMES "${static_lib_name}" - PATHS "${home}" - PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES} - NO_DEFAULT_PATH) - set(static_lib "${${prefix}_static_lib}") - set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE) - if(static_lib) - add_library(${target_static} STATIC IMPORTED) - set_target_properties(${target_static} PROPERTIES IMPORTED_LOCATION "${static_lib}") - if(include_dir) - set_target_properties(${target_static} - PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}") - endif() - endif() -endmacro() - -# Internal macro only for arrow_find_package. -# -# Find package by CMake package configuration. -macro(arrow_find_package_cmake_package_configuration) - find_package(${cmake_package_name} CONFIG) - if(${cmake_package_name}_FOUND) - set(${prefix}_USE_CMAKE_PACKAGE_CONFIG TRUE PARENT_SCOPE) - if(TARGET ${target_shared}) - foreach(suffix ${ARROW_CONFIG_SUFFIXES}) - get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION${suffix}) - if(shared_lib) - # Remove shared library version: - # libarrow.so.100.0.0 -> libarrow.so - # Because ARROW_HOME and pkg-config approaches don't add - # shared library version. 
- string(REGEX - REPLACE "(${CMAKE_SHARED_LIBRARY_SUFFIX})[.0-9]+$" "\\1" shared_lib - "${shared_lib}") - set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE) - break() - endif() - endforeach() - endif() - if(TARGET ${target_static}) - foreach(suffix ${ARROW_CONFIG_SUFFIXES}) - get_target_property(static_lib ${target_static} IMPORTED_LOCATION${suffix}) - if(static_lib) - set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE) - break() - endif() - endforeach() - endif() - endif() -endmacro() - -# Internal macro only for arrow_find_package. -# -# Find package by pkg-config. -macro(arrow_find_package_pkg_config) - pkg_check_modules(${prefix}_PC ${pkg_config_name}) - if(${prefix}_PC_FOUND) - set(${prefix}_USE_PKG_CONFIG TRUE PARENT_SCOPE) - - set(include_dir "${${prefix}_PC_INCLUDEDIR}") - set(lib_dir "${${prefix}_PC_LIBDIR}") - set(shared_lib_paths "${${prefix}_PC_LINK_LIBRARIES}") - # Use the first shared library path as the IMPORTED_LOCATION - # for ${target_shared}. This assumes that the first shared library - # path is the shared library path for this module. - list(GET shared_lib_paths 0 first_shared_lib_path) - # Use the rest shared library paths as the INTERFACE_LINK_LIBRARIES - # for ${target_shared}. This assumes that the rest shared library - # paths are dependency library paths for this module. - list(LENGTH shared_lib_paths n_shared_lib_paths) - if(n_shared_lib_paths LESS_EQUAL 1) - set(rest_shared_lib_paths) - else() - list(SUBLIST - shared_lib_paths - 1 - -1 - rest_shared_lib_paths) - endif() - - set(${prefix}_VERSION "${${prefix}_PC_VERSION}" PARENT_SCOPE) - set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE) - set(${prefix}_SHARED_LIB "${first_shared_lib_path}" PARENT_SCOPE) - - add_library(${target_shared} SHARED IMPORTED) - set_target_properties(${target_shared} - PROPERTIES INTERFACE_INCLUDE_DIRECTORIES - "${include_dir}" - INTERFACE_LINK_LIBRARIES - "${rest_shared_lib_paths}" - IMPORTED_LOCATION - "${first_shared_lib_path}") - get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION) - - find_library(${prefix}_static_lib - NAMES "${static_lib_name}" - PATHS "${lib_dir}" - NO_DEFAULT_PATH) - set(static_lib "${${prefix}_static_lib}") - set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE) - if(static_lib) - add_library(${target_static} STATIC IMPORTED) - set_target_properties(${target_static} - PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}" - IMPORTED_LOCATION "${static_lib}") - endif() - endif() -endmacro() - -function(arrow_find_package - prefix - home - base_name - header_path - cmake_package_name - pkg_config_name) - arrow_build_shared_library_name(shared_lib_name ${base_name}) - arrow_build_import_library_name(import_lib_name ${base_name}) - arrow_build_static_library_name(static_lib_name ${base_name}) - - set(target_shared ${base_name}_shared) - set(target_static ${base_name}_static) - - if(home) - arrow_find_package_home() - set(${prefix}_FIND_APPROACH "HOME: ${home}" PARENT_SCOPE) - else() - arrow_find_package_cmake_package_configuration() - if(${cmake_package_name}_FOUND) - set(${prefix}_FIND_APPROACH - "CMake package configuration: ${cmake_package_name}" - PARENT_SCOPE) - else() - arrow_find_package_pkg_config() - set(${prefix}_FIND_APPROACH "pkg-config: ${pkg_config_name}" PARENT_SCOPE) - endif() - endif() - - if(NOT include_dir) - if(TARGET ${target_shared}) - get_target_property(include_dir ${target_shared} INTERFACE_INCLUDE_DIRECTORIES) - elseif(TARGET ${target_static}) - get_target_property(include_dir ${target_static} 
INTERFACE_INCLUDE_DIRECTORIES) - endif() - endif() - if(include_dir) - set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE) - endif() - - if(shared_lib) - get_filename_component(lib_dir "${shared_lib}" DIRECTORY) - elseif(static_lib) - get_filename_component(lib_dir "${static_lib}" DIRECTORY) - else() - set(lib_dir NOTFOUND) - endif() - set(${prefix}_LIB_DIR "${lib_dir}" PARENT_SCOPE) - # For backward compatibility - set(${prefix}_LIBS "${lib_dir}" PARENT_SCOPE) -endfunction() - -if(NOT "$ENV{ARROW_HOME}" STREQUAL "") - file(TO_CMAKE_PATH "$ENV{ARROW_HOME}" ARROW_HOME) -endif() -arrow_find_package(ARROW - "${ARROW_HOME}" - arrow - arrow/api.h - Arrow - arrow) - -if(ARROW_HOME) - if(ARROW_INCLUDE_DIR) - file(READ "${ARROW_INCLUDE_DIR}/arrow/util/config.h" ARROW_CONFIG_H_CONTENT) - arrow_extract_macro_value(ARROW_VERSION_MAJOR "ARROW_VERSION_MAJOR" - "${ARROW_CONFIG_H_CONTENT}") - arrow_extract_macro_value(ARROW_VERSION_MINOR "ARROW_VERSION_MINOR" - "${ARROW_CONFIG_H_CONTENT}") - arrow_extract_macro_value(ARROW_VERSION_PATCH "ARROW_VERSION_PATCH" - "${ARROW_CONFIG_H_CONTENT}") - if("${ARROW_VERSION_MAJOR}" STREQUAL "" - OR "${ARROW_VERSION_MINOR}" STREQUAL "" - OR "${ARROW_VERSION_PATCH}" STREQUAL "") - set(ARROW_VERSION "0.0.0") - else() - set(ARROW_VERSION - "${ARROW_VERSION_MAJOR}.${ARROW_VERSION_MINOR}.${ARROW_VERSION_PATCH}") - endif() - - arrow_extract_macro_value(ARROW_SO_VERSION_QUOTED "ARROW_SO_VERSION" - "${ARROW_CONFIG_H_CONTENT}") - string(REGEX REPLACE "^\"(.+)\"$" "\\1" ARROW_SO_VERSION "${ARROW_SO_VERSION_QUOTED}") - arrow_extract_macro_value(ARROW_FULL_SO_VERSION_QUOTED "ARROW_FULL_SO_VERSION" - "${ARROW_CONFIG_H_CONTENT}") - string(REGEX - REPLACE "^\"(.+)\"$" "\\1" ARROW_FULL_SO_VERSION - "${ARROW_FULL_SO_VERSION_QUOTED}") - endif() -else() - if(ARROW_USE_CMAKE_PACKAGE_CONFIG) - find_package(Arrow CONFIG) - elseif(ARROW_USE_PKG_CONFIG) - pkg_get_variable(ARROW_SO_VERSION arrow so_version) - pkg_get_variable(ARROW_FULL_SO_VERSION arrow full_so_version) - endif() -endif() - -set(ARROW_ABI_VERSION ${ARROW_SO_VERSION}) - -mark_as_advanced(ARROW_ABI_VERSION - ARROW_CONFIG_SUFFIXES - ARROW_FULL_SO_VERSION - ARROW_IMPORT_LIB - ARROW_INCLUDE_DIR - ARROW_LIBS - ARROW_LIB_DIR - ARROW_SEARCH_LIB_PATH_SUFFIXES - ARROW_SHARED_IMP_LIB - ARROW_SHARED_LIB - ARROW_SO_VERSION - ARROW_STATIC_LIB - ARROW_VERSION - ARROW_VERSION_MAJOR - ARROW_VERSION_MINOR - ARROW_VERSION_PATCH) - -find_package_handle_standard_args(Arrow REQUIRED_VARS - # The first required variable is shown - # in the found message. So this list is - # not sorted alphabetically. - ARROW_INCLUDE_DIR - ARROW_LIB_DIR - ARROW_FULL_SO_VERSION - ARROW_SO_VERSION - VERSION_VAR - ARROW_VERSION) -set(ARROW_FOUND ${Arrow_FOUND}) - -if(Arrow_FOUND AND NOT Arrow_FIND_QUIETLY) - message(STATUS "Arrow version: ${ARROW_VERSION} (${ARROW_FIND_APPROACH})") - message(STATUS "Arrow SO and ABI version: ${ARROW_SO_VERSION}") - message(STATUS "Arrow full SO version: ${ARROW_FULL_SO_VERSION}") - message(STATUS "Found the Arrow core shared library: ${ARROW_SHARED_LIB}") - message(STATUS "Found the Arrow core import library: ${ARROW_IMPORT_LIB}") - message(STATUS "Found the Arrow core static library: ${ARROW_STATIC_LIB}") -endif() diff --git a/cmake/Modules/FindICU.cmake b/cmake/Modules/FindICU.cmake deleted file mode 100644 index 0e61b3dcf29..00000000000 --- a/cmake/Modules/FindICU.cmake +++ /dev/null @@ -1,394 +0,0 @@ -# Distributed under the OSI-approved BSD 3-Clause License. 
See accompanying -# file Copyright.txt or https://cmake.org/licensing for details. - -#.rst: -# FindICU -# ------- -# -# Find the International Components for Unicode (ICU) libraries and -# programs. -# -# This module supports multiple components. -# Components can include any of: ``data``, ``i18n``, ``io``, ``le``, -# ``lx``, ``test``, ``tu`` and ``uc``. -# -# Note that on Windows ``data`` is named ``dt`` and ``i18n`` is named -# ``in``; any of the names may be used, and the appropriate -# platform-specific library name will be automatically selected. -# -# This module reports information about the ICU installation in -# several variables. General variables:: -# -# ICU_VERSION - ICU release version -# ICU_FOUND - true if the main programs and libraries were found -# ICU_LIBRARIES - component libraries to be linked -# ICU_INCLUDE_DIRS - the directories containing the ICU headers -# -# Imported targets:: -# -# ICU::<C> -# -# Where ``<C>`` is the name of an ICU component, for example -# ``ICU::i18n``. -# -# ICU programs are reported in:: -# -# ICU_GENCNVAL_EXECUTABLE - path to gencnval executable -# ICU_ICUINFO_EXECUTABLE - path to icuinfo executable -# ICU_GENBRK_EXECUTABLE - path to genbrk executable -# ICU_ICU-CONFIG_EXECUTABLE - path to icu-config executable -# ICU_GENRB_EXECUTABLE - path to genrb executable -# ICU_GENDICT_EXECUTABLE - path to gendict executable -# ICU_DERB_EXECUTABLE - path to derb executable -# ICU_PKGDATA_EXECUTABLE - path to pkgdata executable -# ICU_UCONV_EXECUTABLE - path to uconv executable -# ICU_GENCFU_EXECUTABLE - path to gencfu executable -# ICU_MAKECONV_EXECUTABLE - path to makeconv executable -# ICU_GENNORM2_EXECUTABLE - path to gennorm2 executable -# ICU_GENCCODE_EXECUTABLE - path to genccode executable -# ICU_GENSPREP_EXECUTABLE - path to gensprep executable -# ICU_ICUPKG_EXECUTABLE - path to icupkg executable -# ICU_GENCMN_EXECUTABLE - path to gencmn executable -# -# ICU component libraries are reported in:: -# -# ICU_<C>_FOUND - ON if component was found -# ICU_<C>_LIBRARIES - libraries for component -# -# ICU datafiles are reported in:: -# -# ICU_MAKEFILE_INC - Makefile.inc -# ICU_PKGDATA_INC - pkgdata.inc -# -# Note that ``<C>`` is the uppercased name of the component. -# -# This module reads hints about search results from:: -# -# ICU_ROOT - the root of the ICU installation -# -# The environment variable ``ICU_ROOT`` may also be used; the -# ICU_ROOT variable takes precedence. -# -# The following cache variables may also be set:: -# -# ICU_<P>_EXECUTABLE - the path to executable <P>
-# ICU_INCLUDE_DIR - the directory containing the ICU headers -# ICU__LIBRARY - the library for component -# -# .. note:: -# -# In most cases none of the above variables will require setting, -# unless multiple ICU versions are available and a specific version -# is required. -# -# Other variables one may set to control this module are:: -# -# ICU_DEBUG - Set to ON to enable debug output from FindICU. - -# Written by Roger Leigh - -set(icu_programs - gencnval - icuinfo - genbrk - icu-config - genrb - gendict - derb - pkgdata - uconv - gencfu - makeconv - gennorm2 - genccode - gensprep - icupkg - gencmn) - -set(icu_data - Makefile.inc - pkgdata.inc) - -# The ICU checks are contained in a function due to the large number -# of temporary variables needed. -function(_ICU_FIND) - # Set up search paths, taking compiler into account. Search ICU_ROOT, - # with ICU_ROOT in the environment as a fallback if unset. - if(ICU_ROOT) - list(APPEND icu_roots "${ICU_ROOT}") - else() - if(NOT "$ENV{ICU_ROOT}" STREQUAL "") - file(TO_CMAKE_PATH "$ENV{ICU_ROOT}" NATIVE_PATH) - list(APPEND icu_roots "${NATIVE_PATH}") - set(ICU_ROOT "${NATIVE_PATH}" - CACHE PATH "Location of the ICU installation" FORCE) - endif() - endif() - - # Find include directory - list(APPEND icu_include_suffixes "include") - find_path(ICU_INCLUDE_DIR - NAMES "unicode/utypes.h" - HINTS ${icu_roots} - PATH_SUFFIXES ${icu_include_suffixes} - DOC "ICU include directory") - set(ICU_INCLUDE_DIR "${ICU_INCLUDE_DIR}" PARENT_SCOPE) - - # Get version - if(ICU_INCLUDE_DIR AND EXISTS "${ICU_INCLUDE_DIR}/unicode/uvernum.h") - file(STRINGS "${ICU_INCLUDE_DIR}/unicode/uvernum.h" icu_header_str - REGEX "^#define[\t ]+U_ICU_VERSION[\t ]+\".*\".*") - - string(REGEX REPLACE "^#define[\t ]+U_ICU_VERSION[\t ]+\"([^ \\n]*)\".*" - "\\1" icu_version_string "${icu_header_str}") - set(ICU_VERSION "${icu_version_string}") - set(ICU_VERSION "${icu_version_string}" PARENT_SCOPE) - unset(icu_header_str) - unset(icu_version_string) - endif() - - if(CMAKE_SIZEOF_VOID_P EQUAL 8) - # 64-bit binary directory - set(_bin64 "bin64") - # 64-bit library directory - set(_lib64 "lib64") - endif() - - - # Find all ICU programs - list(APPEND icu_binary_suffixes "${_bin64}" "bin") - foreach(program ${icu_programs}) - string(TOUPPER "${program}" program_upcase) - set(cache_var "ICU_${program_upcase}_EXECUTABLE") - set(program_var "ICU_${program_upcase}_EXECUTABLE") - find_program("${cache_var}" "${program}" - HINTS ${icu_roots} - PATH_SUFFIXES ${icu_binary_suffixes} - DOC "ICU ${program} executable") - mark_as_advanced(cache_var) - set("${program_var}" "${${cache_var}}" PARENT_SCOPE) - endforeach() - - # Find all ICU libraries - list(APPEND icu_library_suffixes "${_lib64}" "lib") - set(ICU_REQUIRED_LIBS_FOUND ON) - foreach(component ${ICU_FIND_COMPONENTS}) - string(TOUPPER "${component}" component_upcase) - set(component_cache "ICU_${component_upcase}_LIBRARY") - set(component_cache_release "${component_cache}_RELEASE") - set(component_cache_debug "${component_cache}_DEBUG") - set(component_found "${component_upcase}_FOUND") - set(component_libnames "icu${component}") - set(component_debug_libnames "icu${component}d") - - # Special case deliberate library naming mismatches between Unix - # and Windows builds - unset(component_libnames) - unset(component_debug_libnames) - list(APPEND component_libnames "icu${component}") - list(APPEND component_debug_libnames "icu${component}d") - if(component STREQUAL "data") - list(APPEND component_libnames "icudt") - # Note there is no debug variant 
at present - list(APPEND component_debug_libnames "icudtd") - endif() - if(component STREQUAL "dt") - list(APPEND component_libnames "icudata") - # Note there is no debug variant at present - list(APPEND component_debug_libnames "icudatad") - endif() - if(component STREQUAL "i18n") - list(APPEND component_libnames "icuin") - list(APPEND component_debug_libnames "icuind") - endif() - if(component STREQUAL "in") - list(APPEND component_libnames "icui18n") - list(APPEND component_debug_libnames "icui18nd") - endif() - - find_library("${component_cache_release}" ${component_libnames} - HINTS ${icu_roots} - PATH_SUFFIXES ${icu_library_suffixes} - DOC "ICU ${component} library (release)") - find_library("${component_cache_debug}" ${component_debug_libnames} - HINTS ${icu_roots} - PATH_SUFFIXES ${icu_library_suffixes} - DOC "ICU ${component} library (debug)") - include(SelectLibraryConfigurations) - select_library_configurations(ICU_${component_upcase}) - mark_as_advanced("${component_cache_release}" "${component_cache_debug}") - if(${component_cache}) - set("${component_found}" ON) - list(APPEND ICU_LIBRARY "${${component_cache}}") - endif() - mark_as_advanced("${component_found}") - set("${component_cache}" "${${component_cache}}" PARENT_SCOPE) - set("${component_found}" "${${component_found}}" PARENT_SCOPE) - if(${component_found}) - if (ICU_FIND_REQUIRED_${component}) - list(APPEND ICU_LIBS_FOUND "${component} (required)") - else() - list(APPEND ICU_LIBS_FOUND "${component} (optional)") - endif() - else() - if (ICU_FIND_REQUIRED_${component}) - set(ICU_REQUIRED_LIBS_FOUND OFF) - list(APPEND ICU_LIBS_NOTFOUND "${component} (required)") - else() - list(APPEND ICU_LIBS_NOTFOUND "${component} (optional)") - endif() - endif() - endforeach() - set(_ICU_REQUIRED_LIBS_FOUND "${ICU_REQUIRED_LIBS_FOUND}" PARENT_SCOPE) - set(ICU_LIBRARY "${ICU_LIBRARY}" PARENT_SCOPE) - - # Find all ICU data files - if(CMAKE_LIBRARY_ARCHITECTURE) - list(APPEND icu_data_suffixes - "${_lib64}/${CMAKE_LIBRARY_ARCHITECTURE}/icu/${ICU_VERSION}" - "lib/${CMAKE_LIBRARY_ARCHITECTURE}/icu/${ICU_VERSION}" - "${_lib64}/${CMAKE_LIBRARY_ARCHITECTURE}/icu" - "lib/${CMAKE_LIBRARY_ARCHITECTURE}/icu") - endif() - list(APPEND icu_data_suffixes - "${_lib64}/icu/${ICU_VERSION}" - "lib/icu/${ICU_VERSION}" - "${_lib64}/icu" - "lib/icu") - foreach(data ${icu_data}) - string(TOUPPER "${data}" data_upcase) - string(REPLACE "." 
"_" data_upcase "${data_upcase}") - set(cache_var "ICU_${data_upcase}") - set(data_var "ICU_${data_upcase}") - find_file("${cache_var}" "${data}" - HINTS ${icu_roots} - PATH_SUFFIXES ${icu_data_suffixes} - DOC "ICU ${data} data file") - mark_as_advanced(cache_var) - set("${data_var}" "${${cache_var}}" PARENT_SCOPE) - endforeach() - - if(NOT ICU_FIND_QUIETLY) - if(ICU_LIBS_FOUND) - message(STATUS "Found the following ICU libraries:") - foreach(found ${ICU_LIBS_FOUND}) - message(STATUS " ${found}") - endforeach() - endif() - if(ICU_LIBS_NOTFOUND) - message(STATUS "The following ICU libraries were not found:") - foreach(notfound ${ICU_LIBS_NOTFOUND}) - message(STATUS " ${notfound}") - endforeach() - endif() - endif() - - if(ICU_DEBUG) - message(STATUS "--------FindICU.cmake search debug--------") - message(STATUS "ICU binary path search order: ${icu_roots}") - message(STATUS "ICU include path search order: ${icu_roots}") - message(STATUS "ICU library path search order: ${icu_roots}") - message(STATUS "----------------") - endif() -endfunction() - -_ICU_FIND() - -include(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(ICU - FOUND_VAR ICU_FOUND - REQUIRED_VARS ICU_INCLUDE_DIR - ICU_LIBRARY - _ICU_REQUIRED_LIBS_FOUND - VERSION_VAR ICU_VERSION - FAIL_MESSAGE "Failed to find all ICU components") - -unset(_ICU_REQUIRED_LIBS_FOUND) - -if(ICU_FOUND) - set(ICU_INCLUDE_DIRS "${ICU_INCLUDE_DIR}") - set(ICU_LIBRARIES "${ICU_LIBRARY}") - foreach(_ICU_component ${ICU_FIND_COMPONENTS}) - string(TOUPPER "${_ICU_component}" _ICU_component_upcase) - set(_ICU_component_cache "ICU_${_ICU_component_upcase}_LIBRARY") - set(_ICU_component_cache_release "ICU_${_ICU_component_upcase}_LIBRARY_RELEASE") - set(_ICU_component_cache_debug "ICU_${_ICU_component_upcase}_LIBRARY_DEBUG") - set(_ICU_component_lib "ICU_${_ICU_component_upcase}_LIBRARIES") - set(_ICU_component_found "${_ICU_component_upcase}_FOUND") - set(_ICU_imported_target "ICU::${_ICU_component}") - if(${_ICU_component_found}) - set("${_ICU_component_lib}" "${${_ICU_component_cache}}") - if(NOT TARGET ${_ICU_imported_target}) - add_library(${_ICU_imported_target} UNKNOWN IMPORTED) - if(ICU_INCLUDE_DIR) - set_target_properties(${_ICU_imported_target} PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${ICU_INCLUDE_DIR}") - endif() - if(EXISTS "${${_ICU_component_cache}}") - set_target_properties(${_ICU_imported_target} PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" - IMPORTED_LOCATION "${${_ICU_component_cache}}") - endif() - if(EXISTS "${${_ICU_component_cache_release}}") - set_property(TARGET ${_ICU_imported_target} APPEND PROPERTY - IMPORTED_CONFIGURATIONS RELEASE) - set_target_properties(${_ICU_imported_target} PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "CXX" - IMPORTED_LOCATION_RELEASE "${${_ICU_component_cache_release}}") - endif() - if(EXISTS "${${_ICU_component_cache_debug}}") - set_property(TARGET ${_ICU_imported_target} APPEND PROPERTY - IMPORTED_CONFIGURATIONS DEBUG) - set_target_properties(${_ICU_imported_target} PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "CXX" - IMPORTED_LOCATION_DEBUG "${${_ICU_component_cache_debug}}") - endif() - endif() - endif() - unset(_ICU_component_upcase) - unset(_ICU_component_cache) - unset(_ICU_component_lib) - unset(_ICU_component_found) - unset(_ICU_imported_target) - endforeach() -endif() - -if(ICU_DEBUG) - message(STATUS "--------FindICU.cmake results debug--------") - message(STATUS "ICU found: ${ICU_FOUND}") - message(STATUS "ICU_VERSION number: ${ICU_VERSION}") - 
message(STATUS "ICU_ROOT directory: ${ICU_ROOT}") - message(STATUS "ICU_INCLUDE_DIR directory: ${ICU_INCLUDE_DIR}") - message(STATUS "ICU_LIBRARIES: ${ICU_LIBRARIES}") - - foreach(program IN LISTS icu_programs) - string(TOUPPER "${program}" program_upcase) - set(program_lib "ICU_${program_upcase}_EXECUTABLE") - message(STATUS "${program} program: ${${program_lib}}") - unset(program_upcase) - unset(program_lib) - endforeach() - - foreach(data IN LISTS icu_data) - string(TOUPPER "${data}" data_upcase) - string(REPLACE "." "_" data_upcase "${data_upcase}") - set(data_lib "ICU_${data_upcase}") - message(STATUS "${data} data: ${${data_lib}}") - unset(data_upcase) - unset(data_lib) - endforeach() - - foreach(component IN LISTS ICU_FIND_COMPONENTS) - string(TOUPPER "${component}" component_upcase) - set(component_lib "ICU_${component_upcase}_LIBRARIES") - set(component_found "${component_upcase}_FOUND") - message(STATUS "${component} library found: ${${component_found}}") - message(STATUS "${component} library: ${${component_lib}}") - unset(component_upcase) - unset(component_lib) - unset(component_found) - endforeach() - message(STATUS "----------------") -endif() - -unset(icu_programs) diff --git a/cmake/Modules/FindOpenLDAP.cmake b/cmake/Modules/FindOpenLDAP.cmake deleted file mode 100644 index 9c6262fa245..00000000000 --- a/cmake/Modules/FindOpenLDAP.cmake +++ /dev/null @@ -1,55 +0,0 @@ -# Find OpenLDAP libraries. -# -# Can be configured with: -# OPENLDAP_ROOT_DIR - path to the OpenLDAP installation prefix -# OPENLDAP_USE_STATIC_LIBS - look for static version of the libraries -# OPENLDAP_USE_REENTRANT_LIBS - look for thread-safe version of the libraries -# -# Sets values of: -# OPENLDAP_FOUND - TRUE if found -# OPENLDAP_INCLUDE_DIRS - paths to the include directories -# OPENLDAP_LIBRARIES - paths to the libldap and liblber libraries -# OPENLDAP_LDAP_LIBRARY - paths to the libldap library -# OPENLDAP_LBER_LIBRARY - paths to the liblber library -# - -if(OPENLDAP_USE_STATIC_LIBS) - set(_orig_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES}) - if(WIN32) - set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib" ".a" ${CMAKE_FIND_LIBRARY_SUFFIXES}) - else() - set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") - endif() -endif() - -set(_r_suffix) -if(OPENLDAP_USE_REENTRANT_LIBS) - set(_r_suffix "_r") -endif() - -if(OPENLDAP_ROOT_DIR) - find_path(OPENLDAP_INCLUDE_DIRS NAMES "ldap.h" "lber.h" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "include" NO_DEFAULT_PATH) - find_library(OPENLDAP_LDAP_LIBRARY NAMES "ldap${_r_suffix}" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "lib" NO_DEFAULT_PATH) - find_library(OPENLDAP_LBER_LIBRARY NAMES "lber" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "lib" NO_DEFAULT_PATH) -else() - find_path(OPENLDAP_INCLUDE_DIRS NAMES "ldap.h" "lber.h") - find_library(OPENLDAP_LDAP_LIBRARY NAMES "ldap${_r_suffix}") - find_library(OPENLDAP_LBER_LIBRARY NAMES "lber") -endif() - -unset(_r_suffix) - -set(OPENLDAP_LIBRARIES ${OPENLDAP_LDAP_LIBRARY} ${OPENLDAP_LBER_LIBRARY}) - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args( - OpenLDAP DEFAULT_MSG - OPENLDAP_INCLUDE_DIRS OPENLDAP_LDAP_LIBRARY OPENLDAP_LBER_LIBRARY -) - -mark_as_advanced(OPENLDAP_INCLUDE_DIRS OPENLDAP_LIBRARIES OPENLDAP_LDAP_LIBRARY OPENLDAP_LBER_LIBRARY) - -if(OPENLDAP_USE_STATIC_LIBS) - set(CMAKE_FIND_LIBRARY_SUFFIXES ${_orig_CMAKE_FIND_LIBRARY_SUFFIXES}) - unset(_orig_CMAKE_FIND_LIBRARY_SUFFIXES) -endif() diff --git a/cmake/Modules/FindParquet.cmake b/cmake/Modules/FindParquet.cmake deleted file mode 100644 index 
654020c0b87..00000000000 --- a/cmake/Modules/FindParquet.cmake +++ /dev/null @@ -1,132 +0,0 @@ -# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindParquet.cmake - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# - Find Parquet (parquet/api/reader.h, libparquet.a, libparquet.so) -# -# This module requires Arrow from which it uses -# arrow_find_package() -# -# This module defines -# PARQUET_FOUND, whether Parquet has been found -# PARQUET_IMPORT_LIB, path to libparquet's import library (Windows only) -# PARQUET_INCLUDE_DIR, directory containing headers -# PARQUET_LIBS, deprecated. Use PARQUET_LIB_DIR instead -# PARQUET_LIB_DIR, directory containing Parquet libraries -# PARQUET_SHARED_IMP_LIB, deprecated. Use PARQUET_IMPORT_LIB instead -# PARQUET_SHARED_LIB, path to libparquet's shared library -# PARQUET_SO_VERSION, shared object version of found Parquet such as "100" -# PARQUET_STATIC_LIB, path to libparquet.a - -if(DEFINED PARQUET_FOUND) - return() -endif() - -set(find_package_arguments) -if(${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION) - list(APPEND find_package_arguments "${${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION}") -endif() -if(${CMAKE_FIND_PACKAGE_NAME}_FIND_REQUIRED) - list(APPEND find_package_arguments REQUIRED) -endif() -if(${CMAKE_FIND_PACKAGE_NAME}_FIND_QUIETLY) - list(APPEND find_package_arguments QUIET) -endif() -find_package(Arrow ${find_package_arguments}) - -if(NOT "$ENV{PARQUET_HOME}" STREQUAL "") - file(TO_CMAKE_PATH "$ENV{PARQUET_HOME}" PARQUET_HOME) -endif() - -if((NOT PARQUET_HOME) AND ARROW_HOME) - set(PARQUET_HOME ${ARROW_HOME}) -endif() - -if(ARROW_FOUND) - arrow_find_package(PARQUET - "${PARQUET_HOME}" - parquet - parquet/api/reader.h - Parquet - parquet) - if(PARQUET_HOME) - if(PARQUET_INCLUDE_DIR) - file(READ "${PARQUET_INCLUDE_DIR}/parquet/parquet_version.h" - PARQUET_VERSION_H_CONTENT) - arrow_extract_macro_value(PARQUET_VERSION_MAJOR "PARQUET_VERSION_MAJOR" - "${PARQUET_VERSION_H_CONTENT}") - arrow_extract_macro_value(PARQUET_VERSION_MINOR "PARQUET_VERSION_MINOR" - "${PARQUET_VERSION_H_CONTENT}") - arrow_extract_macro_value(PARQUET_VERSION_PATCH "PARQUET_VERSION_PATCH" - "${PARQUET_VERSION_H_CONTENT}") - if("${PARQUET_VERSION_MAJOR}" STREQUAL "" - OR "${PARQUET_VERSION_MINOR}" STREQUAL "" - OR "${PARQUET_VERSION_PATCH}" STREQUAL "") - set(PARQUET_VERSION "0.0.0") - else() - set(PARQUET_VERSION - "${PARQUET_VERSION_MAJOR}.${PARQUET_VERSION_MINOR}.${PARQUET_VERSION_PATCH}") - endif() - - arrow_extract_macro_value(PARQUET_SO_VERSION_QUOTED "PARQUET_SO_VERSION" - "${PARQUET_VERSION_H_CONTENT}") - string(REGEX - REPLACE "^\"(.+)\"$" "\\1" PARQUET_SO_VERSION "${PARQUET_SO_VERSION_QUOTED}") - arrow_extract_macro_value(PARQUET_FULL_SO_VERSION_QUOTED "PARQUET_FULL_SO_VERSION" - "${PARQUET_VERSION_H_CONTENT}") - string(REGEX 
- REPLACE "^\"(.+)\"$" "\\1" PARQUET_FULL_SO_VERSION - "${PARQUET_FULL_SO_VERSION_QUOTED}") - endif() - else() - if(PARQUET_USE_CMAKE_PACKAGE_CONFIG) - find_package(Parquet CONFIG) - elseif(PARQUET_USE_PKG_CONFIG) - pkg_get_variable(PARQUET_SO_VERSION parquet so_version) - pkg_get_variable(PARQUET_FULL_SO_VERSION parquet full_so_version) - endif() - endif() - set(PARQUET_ABI_VERSION "${PARQUET_SO_VERSION}") -endif() - -mark_as_advanced(PARQUET_ABI_VERSION - PARQUET_IMPORT_LIB - PARQUET_INCLUDE_DIR - PARQUET_LIBS - PARQUET_LIB_DIR - PARQUET_SHARED_IMP_LIB - PARQUET_SHARED_LIB - PARQUET_SO_VERSION - PARQUET_STATIC_LIB - PARQUET_VERSION) - -find_package_handle_standard_args(Parquet - REQUIRED_VARS - PARQUET_INCLUDE_DIR - PARQUET_LIB_DIR - PARQUET_SO_VERSION - VERSION_VAR - PARQUET_VERSION) -set(PARQUET_FOUND ${Parquet_FOUND}) - -if(Parquet_FOUND AND NOT Parquet_FIND_QUIETLY) - message(STATUS "Parquet version: ${PARQUET_VERSION} (${PARQUET_FIND_APPROACH})") - message(STATUS "Found the Parquet shared library: ${PARQUET_SHARED_LIB}") - message(STATUS "Found the Parquet import library: ${PARQUET_IMPORT_LIB}") - message(STATUS "Found the Parquet static library: ${PARQUET_STATIC_LIB}") -endif() diff --git a/cmake/Modules/Findcityhash.cmake b/cmake/Modules/Findcityhash.cmake deleted file mode 100644 index 5250df2e0a6..00000000000 --- a/cmake/Modules/Findcityhash.cmake +++ /dev/null @@ -1,44 +0,0 @@ -# - Try to find cityhash headers and libraries. -# -# Usage of this module as follows: -# -# find_package(cityhash) -# -# Variables used by this module, they can change the default behaviour and need -# to be set before calling find_package: -# -# CITYHASH_ROOT_DIR Set this variable to the root installation of -# cityhash if the module has problems finding -# the proper installation path. -# -# Variables defined by this module: -# -# CITYHASH_FOUND System has cityhash libs/headers -# CITYHASH_LIBRARIES The cityhash library/libraries -# CITYHASH_INCLUDE_DIR The location of cityhash headers - -find_path(CITYHASH_ROOT_DIR - NAMES include/city.h -) - -find_library(CITYHASH_LIBRARIES - NAMES cityhash - PATHS ${CITYHASH_ROOT_DIR}/lib ${CITYHASH_LIBRARIES_PATHS} -) - -find_path(CITYHASH_INCLUDE_DIR - NAMES city.h - PATHS ${CITYHASH_ROOT_DIR}/include ${CITYHASH_INCLUDE_PATHS} -) - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(cityhash DEFAULT_MSG - CITYHASH_LIBRARIES - CITYHASH_INCLUDE_DIR -) - -mark_as_advanced( - CITYHASH_ROOT_DIR - CITYHASH_LIBRARIES - CITYHASH_INCLUDE_DIR -) diff --git a/cmake/Modules/Finddouble-conversion.cmake b/cmake/Modules/Finddouble-conversion.cmake deleted file mode 100644 index cb01be0f25b..00000000000 --- a/cmake/Modules/Finddouble-conversion.cmake +++ /dev/null @@ -1,44 +0,0 @@ -# - Try to find double-conversion headers and libraries. -# -# Usage of this module as follows: -# -# find_package(double-conversion) -# -# Variables used by this module, they can change the default behaviour and need -# to be set before calling find_package: -# -# DOUBLE_CONVERSION_ROOT_DIR Set this variable to the root installation of -# double-conversion if the module has problems finding -# the proper installation path. 
-# -# Variables defined by this module: -# -# DOUBLE_CONVERSION_FOUND System has double-conversion libs/headers -# DOUBLE_CONVERSION_LIBRARIES The double-conversion library/libraries -# DOUBLE_CONVERSION_INCLUDE_DIR The location of double-conversion headers - -find_path(DOUBLE_CONVERSION_ROOT_DIR - NAMES include/double-conversion/double-conversion.h -) - -find_library(DOUBLE_CONVERSION_LIBRARIES - NAMES double-conversion - PATHS ${DOUBLE_CONVERSION_ROOT_DIR}/lib ${BTRIE_CITYHASH_PATHS} -) - -find_path(DOUBLE_CONVERSION_INCLUDE_DIR - NAMES double-conversion/double-conversion.h - PATHS ${DOUBLE_CONVERSION_ROOT_DIR}/include ${DOUBLE_CONVERSION_INCLUDE_PATHS} -) - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(double_conversion DEFAULT_MSG - DOUBLE_CONVERSION_LIBRARIES - DOUBLE_CONVERSION_INCLUDE_DIR -) - -mark_as_advanced( - DOUBLE_CONVERSION_ROOT_DIR - DOUBLE_CONVERSION_LIBRARIES - DOUBLE_CONVERSION_INCLUDE_DIR -) diff --git a/cmake/Modules/Findfarmhash.cmake b/cmake/Modules/Findfarmhash.cmake deleted file mode 100644 index 2b45fde2c67..00000000000 --- a/cmake/Modules/Findfarmhash.cmake +++ /dev/null @@ -1,44 +0,0 @@ -# - Try to find farmhash headers and libraries. -# -# Usage of this module as follows: -# -# find_package(farmhash) -# -# Variables used by this module, they can change the default behaviour and need -# to be set before calling find_package: -# -# FARMHASH_ROOT_DIR Set this variable to the root installation of -# farmhash if the module has problems finding -# the proper installation path. -# -# Variables defined by this module: -# -# FARMHASH_FOUND System has farmhash libs/headers -# FARMHASH_LIBRARIES The farmhash library/libraries -# FARMHASH_INCLUDE_DIR The location of farmhash headers - -find_path(FARMHASH_ROOT_DIR - NAMES include/farmhash.h -) - -find_library(FARMHASH_LIBRARIES - NAMES farmhash - PATHS ${FARMHASH_ROOT_DIR}/lib ${FARMHASH_LIBRARIES_PATHS} -) - -find_path(FARMHASH_INCLUDE_DIR - NAMES farmhash.h - PATHS ${FARMHASH_ROOT_DIR}/include ${FARMHASH_INCLUDE_PATHS} -) - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(farmhash DEFAULT_MSG - FARMHASH_LIBRARIES - FARMHASH_INCLUDE_DIR -) - -mark_as_advanced( - FARMHASH_ROOT_DIR - FARMHASH_LIBRARIES - FARMHASH_INCLUDE_DIR -) diff --git a/cmake/Modules/FindgRPC.cmake b/cmake/Modules/FindgRPC.cmake deleted file mode 100644 index 945d307952b..00000000000 --- a/cmake/Modules/FindgRPC.cmake +++ /dev/null @@ -1,337 +0,0 @@ -#[[ -Defines the following variables: -``gRPC_FOUND`` - Whether the gRPC framework is found -``gRPC_INCLUDE_DIRS`` - The include directories of the gRPC framework, including the include directories of the C++ wrapper. -``gRPC_LIBRARIES`` - The libraries of the gRPC framework. -``gRPC_CPP_PLUGIN`` - The plugin for generating gRPC client and server C++ stubs from `.proto` files -``gRPC_PYTHON_PLUGIN`` - The plugin for generating gRPC client and server Python stubs from `.proto` files - -The following :prop_tgt:`IMPORTED` targets are also defined: -``grpc++`` -``grpc++_unsecure`` -``grpc_cpp_plugin`` -``grpc_python_plugin`` - -Set the following variables to adjust the behaviour of this script: -``gRPC_USE_UNSECURE_LIBRARIES`` - if set gRPC_LIBRARIES will be filled with the unsecure version of the libraries (i.e. without SSL) - instead of the secure ones. -``gRPC_DEBUG` - if set the debug message will be printed. 
- -Add custom commands to process ``.proto`` files to C++:: -protobuf_generate_grpc_cpp( - [DESCRIPTORS ] [EXPORT_MACRO ] [...]) - -``SRCS`` - Variable to define with autogenerated source files -``HDRS`` - Variable to define with autogenerated header files -``DESCRIPTORS`` - Variable to define with autogenerated descriptor files, if requested. -``EXPORT_MACRO`` - is a macro which should expand to ``__declspec(dllexport)`` or - ``__declspec(dllimport)`` depending on what is being compiled. -``ARGN`` - ``.proto`` files -#]] - -# Function to generate C++ files from .proto files. -# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake. -function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS) - cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN}) - - set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}") - if(NOT _proto_files) - message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files") - return() - endif() - - if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH) - set(_append_arg APPEND_PATH) - endif() - - if(protobuf_generate_grpc_cpp_DESCRIPTORS) - set(_descriptors DESCRIPTORS) - endif() - - if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS) - set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}") - endif() - - if(DEFINED Protobuf_IMPORT_DIRS) - set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS}) - endif() - - set(_outvar) - protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files}) - - set(${SRCS}) - set(${HDRS}) - if(protobuf_generate_grpc_cpp_DESCRIPTORS) - set(${protobuf_generate_grpc_cpp_DESCRIPTORS}) - endif() - - foreach(_file ${_outvar}) - if(_file MATCHES "cc$") - list(APPEND ${SRCS} ${_file}) - elseif(_file MATCHES "desc$") - list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file}) - else() - list(APPEND ${HDRS} ${_file}) - endif() - endforeach() - set(${SRCS} ${${SRCS}} PARENT_SCOPE) - set(${HDRS} ${${HDRS}} PARENT_SCOPE) - if(protobuf_generate_grpc_cpp_DESCRIPTORS) - set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE) - endif() -endfunction() - -# Helper function. -# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake. 
-function(protobuf_generate_grpc) - set(_options APPEND_PATH DESCRIPTORS) - set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR) - if(COMMAND target_sources) - list(APPEND _singleargs TARGET) - endif() - set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS) - - cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}") - - if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET) - message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files") - return() - endif() - - if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET) - message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable") - return() - endif() - - if(NOT protobuf_generate_grpc_LANGUAGE) - set(protobuf_generate_grpc_LANGUAGE cpp) - endif() - string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE) - - if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR) - set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}) - endif() - - if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp) - set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:") - endif() - - if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS) - if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp) - set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc) - elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python) - set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py) - else() - message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS") - return() - endif() - endif() - - if(NOT protobuf_generate_grpc_PLUGIN) - if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp) - set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin") - elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python) - set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin") - else() - message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN") - return() - endif() - endif() - - if(protobuf_generate_grpc_TARGET) - get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES) - foreach(_file ${_source_list}) - if(_file MATCHES "proto$") - list(APPEND protobuf_generate_grpc_PROTOS ${_file}) - endif() - endforeach() - endif() - - if(NOT protobuf_generate_grpc_PROTOS) - message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files") - return() - endif() - - if(protobuf_generate_grpc_APPEND_PATH) - # Create an include path for each file specified - foreach(_file ${protobuf_generate_grpc_PROTOS}) - get_filename_component(_abs_file ${_file} ABSOLUTE) - get_filename_component(_abs_path ${_abs_file} PATH) - list(FIND _protobuf_include_path ${_abs_path} _contains_already) - if(${_contains_already} EQUAL -1) - list(APPEND _protobuf_include_path -I ${_abs_path}) - endif() - endforeach() - else() - set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR}) - endif() - - foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS}) - get_filename_component(ABS_PATH ${DIR} ABSOLUTE) - list(FIND _protobuf_include_path ${ABS_PATH} _contains_already) - if(${_contains_already} EQUAL -1) - list(APPEND _protobuf_include_path -I ${ABS_PATH}) - endif() - endforeach() - - set(_generated_srcs_all) - foreach(_proto ${protobuf_generate_grpc_PROTOS}) - get_filename_component(_abs_file ${_proto} ABSOLUTE) - 
get_filename_component(_abs_dir ${_abs_file} DIRECTORY) - get_filename_component(_basename ${_proto} NAME_WE) - file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir}) - - set(_possible_rel_dir) - if(NOT protobuf_generate_grpc_APPEND_PATH) - set(_possible_rel_dir ${_rel_dir}/) - endif() - - set(_generated_srcs) - foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS}) - list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}") - endforeach() - - if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp) - set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc") - set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}") - list(APPEND _generated_srcs ${_descriptor_file}) - endif() - list(APPEND _generated_srcs_all ${_generated_srcs}) - - add_custom_command( - OUTPUT ${_generated_srcs} - COMMAND protobuf::protoc - ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR} - --grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR} - --plugin=protoc-gen-grpc=$ - ${_dll_desc_out} ${_protobuf_include_path} ${_abs_file} - DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN} - COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}" - VERBATIM) - endforeach() - - set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE) - if(protobuf_generate_grpc_OUT_VAR) - set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE) - endif() - if(protobuf_generate_grpc_TARGET) - target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all}) - endif() -endfunction() - - -# Find the libraries. -if(gRPC_USE_STATIC_LIBS) - # Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES - set(_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES}) - if(WIN32) - set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES}) - else() - set(CMAKE_FIND_LIBRARY_SUFFIXES .a) - endif() -endif() - -find_library(gRPC_LIBRARY NAMES grpc) -find_library(gRPC_CPP_LIBRARY NAMES grpc++) -find_library(gRPC_UNSECURE_LIBRARY NAMES grpc_unsecure) -find_library(gRPC_CPP_UNSECURE_LIBRARY NAMES grpc++_unsecure) -find_library(gRPC_CARES_LIBRARY NAMES cares) - -set(gRPC_LIBRARIES) -if(gRPC_USE_UNSECURE_LIBRARIES) - if(gRPC_UNSECURE_LIBRARY) - set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_UNSECURE_LIBRARY}) - endif() - if(gRPC_CPP_UNSECURE_LIBRARY) - set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_UNSECURE_LIBRARY}) - endif() -else() - if(gRPC_LIBRARY) - set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_LIBRARY}) - endif() - if(gRPC_CPP_UNSECURE_LIBRARY) - set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_LIBRARY}) - endif() -endif() -set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CARES_LIBRARY}) - -# Restore the original find library ordering. -if(gRPC_USE_STATIC_LIBS) - set(CMAKE_FIND_LIBRARY_SUFFIXES ${_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES}) -endif() - -# Find the include directories. -find_path(gRPC_INCLUDE_DIR grpc/grpc.h) -find_path(gRPC_CPP_INCLUDE_DIR grpc++/grpc++.h) - -if(gRPC_INCLUDE_DIR AND gRPC_CPP_INCLUDE_DIR AND NOT(gRPC_INCLUDE_DIR STREQUAL gRPC_CPP_INCLUDE_DIR)) - set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR} ${gRPC_CPP_INCLUDE_DIR}) -elseif(gRPC_INCLUDE_DIR) - set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR}) -else() - set(gRPC_INCLUDE_DIRS ${gRPC_CPP_INCLUDE_DIR}) -endif() - -# Get full path to plugin. 
-find_program(gRPC_CPP_PLUGIN - NAMES grpc_cpp_plugin - DOC "The plugin for generating gRPC client and server C++ stubs from `.proto` files") - -find_program(gRPC_PYTHON_PLUGIN - NAMES grpc_python_plugin - DOC "The plugin for generating gRPC client and server Python stubs from `.proto` files") - -# Add imported targets. -if(gRPC_CPP_LIBRARY AND NOT TARGET grpc++) - add_library(grpc++ UNKNOWN IMPORTED) - set_target_properties(grpc++ PROPERTIES - IMPORTED_LOCATION "${gRPC_CPP_LIBRARY}") - set_target_properties(grpc++ PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS}) -endif() - -if(gRPC_CPP_UNSECURE_LIBRARY AND NOT TARGET grpc++_unsecure) - add_library(grpc++_unsecure UNKNOWN IMPORTED) - set_target_properties(grpc++_unsecure PROPERTIES - IMPORTED_LOCATION "${gRPC_CPP_UNSECURE_LIBRARY}") - set_target_properties(grpc++_unsecure PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS}) -endif() - -if(gRPC_CPP_PLUGIN AND NOT TARGET grpc_cpp_plugin) - add_executable(grpc_cpp_plugin IMPORTED) - set_target_properties(grpc_cpp_plugin PROPERTIES - IMPORTED_LOCATION "${gRPC_CPP_PLUGIN}") -endif() - -if(gRPC_PYTHON_PLUGIN AND NOT TARGET grpc_python_plugin) - add_executable(grpc_python_plugin IMPORTED) - set_target_properties(grpc_python_plugin PROPERTIES - IMPORTED_LOCATION "${gRPC_PYTHON_PLUGIN}") -endif() - -#include(FindPackageHandleStandardArgs.cmake) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(gRPC - REQUIRED_VARS gRPC_LIBRARY gRPC_CPP_LIBRARY gRPC_UNSECURE_LIBRARY gRPC_CPP_UNSECURE_LIBRARY gRPC_CARES_LIBRARY - gRPC_INCLUDE_DIR gRPC_CPP_INCLUDE_DIR gRPC_CPP_PLUGIN gRPC_PYTHON_PLUGIN) - -if(gRPC_FOUND) - if(gRPC_DEBUG) - message(STATUS "gRPC: INCLUDE_DIRS=${gRPC_INCLUDE_DIRS}") - message(STATUS "gRPC: LIBRARIES=${gRPC_LIBRARIES}") - message(STATUS "gRPC: CPP_PLUGIN=${gRPC_CPP_PLUGIN}") - message(STATUS "gRPC: PYTHON_PLUGIN=${gRPC_PYTHON_PLUGIN}") - endif() -endif() diff --git a/cmake/contrib_finder.cmake b/cmake/contrib_finder.cmake deleted file mode 100644 index e97fda6a6f3..00000000000 --- a/cmake/contrib_finder.cmake +++ /dev/null @@ -1,23 +0,0 @@ -macro(find_contrib_lib LIB_NAME) - - string(TOLOWER ${LIB_NAME} LIB_NAME_LC) - string(TOUPPER ${LIB_NAME} LIB_NAME_UC) - string(REPLACE "-" "_" LIB_NAME_UC ${LIB_NAME_UC}) - - option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ON) - - if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY) - find_package ("${LIB_NAME}") - if (NOT ${LIB_NAME_UC}_FOUND) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system ${LIB_NAME}") - endif() - endif () - - if (NOT ${LIB_NAME_UC}_FOUND) - set (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY 1) - set (${LIB_NAME_UC}_LIBRARIES ${LIB_NAME_LC}) - set (${LIB_NAME_UC}_INCLUDE_DIR ${${LIB_NAME_UC}_CONTRIB_INCLUDE_DIR}) - endif () - - message (STATUS "Using ${LIB_NAME}: ${${LIB_NAME_UC}_INCLUDE_DIR} : ${${LIB_NAME_UC}_LIBRARIES}") -endmacro() diff --git a/cmake/cpu_features.cmake b/cmake/cpu_features.cmake index 20c61ead3d2..535d1b3c93e 100644 --- a/cmake/cpu_features.cmake +++ b/cmake/cpu_features.cmake @@ -134,7 +134,7 @@ else () set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") endif () - set (TEST_FLAG "-mavx512f -mavx512bw") + set (TEST_FLAG "-mavx512f -mavx512bw -mavx512vl") set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") check_cxx_source_compiles(" #include @@ -143,6 +143,8 @@ else () (void)a; auto b = _mm512_add_epi16(__m512i(), __m512i()); (void)b; + auto c = _mm_cmp_epi8_mask(__m128i(), __m128i(), 0); + (void)c; return 0; } " HAVE_AVX512) @@ -181,7 +183,7 @@ else () set 
(X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi") endif () if (HAVE_AVX512) - set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mprefer-vector-width=256") + set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mavx512vl -mprefer-vector-width=256") endif () endif () endif () diff --git a/cmake/find/amqpcpp.cmake b/cmake/find/amqpcpp.cmake deleted file mode 100644 index e033bea439f..00000000000 --- a/cmake/find/amqpcpp.cmake +++ /dev/null @@ -1,29 +0,0 @@ -if (MISSING_INTERNAL_LIBUV_LIBRARY) - message (WARNING "Can't find internal libuv needed for AMQP-CPP library") - set (ENABLE_AMQPCPP OFF CACHE INTERNAL "") -endif() - -option(ENABLE_AMQPCPP "Enalbe AMQP-CPP" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_AMQPCPP) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/src") - message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library") - set (USE_AMQPCPP 0) - return() -endif () - -set (USE_AMQPCPP 1) -set (AMQPCPP_LIBRARY amqp-cpp ${OPENSSL_LIBRARIES}) - -set (AMQPCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/include") -list (APPEND AMQPCPP_INCLUDE_DIR - "${LIBUV_INCLUDE_DIR}" - "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP") - -list (APPEND AMQPCPP_LIBRARY "${LIBUV_LIBRARY}") - -message (STATUS "Using AMQP-CPP=${USE_AMQPCPP}: ${AMQPCPP_INCLUDE_DIR} : ${AMQPCPP_LIBRARY}") diff --git a/cmake/find/avro.cmake b/cmake/find/avro.cmake deleted file mode 100644 index a70fb92c122..00000000000 --- a/cmake/find/avro.cmake +++ /dev/null @@ -1,35 +0,0 @@ -# Needed when using Apache Avro serialization format -option (ENABLE_AVRO "Enable Avro" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_AVRO) - if (USE_INTERNAL_AVRO_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal avro library with ENABLE_AVRO=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_AVRO_LIBRARY "Set to FALSE to use system avro library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang") - if (USE_INTERNAL_AVRO_LIBRARY) - message(WARNING "submodule contrib/avro is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro") - set(USE_INTERNAL_AVRO_LIBRARY 0) - endif() - set(MISSING_INTERNAL_AVRO_LIBRARY 1) -endif() - -if (NOT USE_INTERNAL_AVRO_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Using system avro library is not supported yet") -elseif(NOT MISSING_INTERNAL_AVRO_LIBRARY) - include(cmake/find/snappy.cmake) - set(AVROCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/include") - set(AVROCPP_LIBRARY avrocpp) - set(USE_INTERNAL_AVRO_LIBRARY 1) -endif () - -if (AVROCPP_LIBRARY AND AVROCPP_INCLUDE_DIR) - set(USE_AVRO 1) -endif() - -message (STATUS "Using avro=${USE_AVRO}: ${AVROCPP_INCLUDE_DIR} : ${AVROCPP_LIBRARY}") diff --git a/cmake/find/base64.cmake b/cmake/find/base64.cmake deleted file mode 100644 index ee12fbb11ba..00000000000 --- a/cmake/find/base64.cmake +++ /dev/null @@ -1,25 +0,0 @@ -if(ARCH_AMD64 OR ARCH_ARM) - option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES}) -elseif(ENABLE_BASE64) - message (${RECONFIGURE_MESSAGE_LEVEL} "base64 library is only supported on x86_64 and aarch64") -endif() - -if (NOT ENABLE_BASE64) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64/LICENSE") - set (MISSING_INTERNAL_BASE64_LIBRARY 1) - message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init") -endif () - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64") - message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init") -else() - set (BASE64_LIBRARY base64) - set (USE_BASE64 1) -endif() - -if (NOT USE_BASE64) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable base64") -endif() diff --git a/cmake/find/blob_storage.cmake b/cmake/find/blob_storage.cmake deleted file mode 100644 index 4ad7296e95e..00000000000 --- a/cmake/find/blob_storage.cmake +++ /dev/null @@ -1,29 +0,0 @@ -option (ENABLE_AZURE_BLOB_STORAGE "Enable Azure blob storage" ${ENABLE_LIBRARIES}) - -if (ENABLE_AZURE_BLOB_STORAGE) - option(USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY - "Set to FALSE to use system Azure SDK instead of bundled (OFF currently not implemented)" - ON) - - set(USE_AZURE_BLOB_STORAGE 1) - set(AZURE_BLOB_STORAGE_LIBRARY azure_sdk) - - if ((NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/azure/sdk" - OR NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/azure/cmake-modules") - AND USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY) - message (WARNING "submodule contrib/azure is missing. 
to fix try run: \n git submodule update --init") - set(USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY OFF) - set(USE_AZURE_BLOB_STORAGE 0) - endif () - - if (NOT USE_INTERNAL_SSL_LIBRARY AND USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY) - message (FATAL_ERROR "Currently Blob Storage support can be built only with internal SSL library") - endif() - - if (NOT USE_INTERNAL_CURL AND USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY) - message (FATAL_ERROR "Currently Blob Storage support can be built only with internal curl library") - endif() - -endif() - -message (STATUS "Using Azure Blob Storage - ${USE_AZURE_BLOB_STORAGE}") diff --git a/cmake/find/brotli.cmake b/cmake/find/brotli.cmake deleted file mode 100644 index 6469ec04f45..00000000000 --- a/cmake/find/brotli.cmake +++ /dev/null @@ -1,42 +0,0 @@ -option (ENABLE_BROTLI "Enable brotli" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_BROTLI) - if (USE_INTERNAL_BROTLI_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal brotly library with ENABLE_BROTLI=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h") - if (USE_INTERNAL_BROTLI_LIBRARY) - message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal brotli") - set (USE_INTERNAL_BROTLI_LIBRARY 0) - endif () - set (MISSING_INTERNAL_BROTLI_LIBRARY 1) -endif () - -if(NOT USE_INTERNAL_BROTLI_LIBRARY) - find_library(BROTLI_LIBRARY_COMMON brotlicommon) - find_library(BROTLI_LIBRARY_DEC brotlidec) - find_library(BROTLI_LIBRARY_ENC brotlienc) - find_path(BROTLI_INCLUDE_DIR NAMES brotli/decode.h brotli/encode.h brotli/port.h brotli/types.h PATHS ${BROTLI_INCLUDE_PATHS}) - if(BROTLI_LIBRARY_DEC AND BROTLI_LIBRARY_ENC AND BROTLI_LIBRARY_COMMON) - set(BROTLI_LIBRARY ${BROTLI_LIBRARY_DEC} ${BROTLI_LIBRARY_ENC} ${BROTLI_LIBRARY_COMMON}) - else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system brotli") - endif() -endif() - -if (BROTLI_LIBRARY AND BROTLI_INCLUDE_DIR) - set (USE_BROTLI 1) -elseif (NOT MISSING_INTERNAL_BROTLI_LIBRARY) - set (BROTLI_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include") - set (USE_INTERNAL_BROTLI_LIBRARY 1) - set (BROTLI_LIBRARY brotli) - set (USE_BROTLI 1) -endif () - -message (STATUS "Using brotli=${USE_BROTLI}: ${BROTLI_INCLUDE_DIR} : ${BROTLI_LIBRARY}") diff --git a/cmake/find/bzip2.cmake b/cmake/find/bzip2.cmake deleted file mode 100644 index 5e6a6fb5841..00000000000 --- a/cmake/find/bzip2.cmake +++ /dev/null @@ -1,19 +0,0 @@ -option(ENABLE_BZIP2 "Enable bzip2 compression support" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_BZIP2) - message (STATUS "bzip2 compression disabled") - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/bzip2/bzlib.h") - message (WARNING "submodule contrib/bzip2 is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal bzip2 library") - set (USE_NLP 0) - return() -endif () - -set (USE_BZIP2 1) -set (BZIP2_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/bzip2") -set (BZIP2_LIBRARY bzip2) - -message (STATUS "Using bzip2=${USE_BZIP2}: ${BZIP2_INCLUDE_DIR} : ${BZIP2_LIBRARY}") diff --git a/cmake/find/capnp.cmake b/cmake/find/capnp.cmake deleted file mode 100644 index fa62c64105f..00000000000 --- a/cmake/find/capnp.cmake +++ /dev/null @@ -1,42 +0,0 @@ -option (ENABLE_CAPNP "Enable Cap'n Proto" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_CAPNP) - if (USE_INTERNAL_CAPNP_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal capnproto library with ENABLE_CAPNP=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/c++") - if(USE_INTERNAL_CAPNP_LIBRARY) - message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init") - message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto") - set(USE_INTERNAL_CAPNP_LIBRARY 0) - endif() - set(MISSING_INTERNAL_CAPNP_LIBRARY 1) -endif() - -# FIXME: refactor to use `add_library(… IMPORTED)` if possible. -if (NOT USE_INTERNAL_CAPNP_LIBRARY) - find_library (KJ kj) - find_library (CAPNP capnp) - find_library (CAPNPC capnpc) - - if(KJ AND CAPNP AND CAPNPC) - set (CAPNP_LIBRARIES ${CAPNPC} ${CAPNP} ${KJ}) - else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system capnproto") - endif() -endif() - -if (CAPNP_LIBRARIES) - set (USE_CAPNP 1) -elseif(NOT MISSING_INTERNAL_CAPNP_LIBRARY) - set (CAPNP_LIBRARIES capnpc) - set (USE_CAPNP 1) - set (USE_INTERNAL_CAPNP_LIBRARY 1) -endif () - -message (STATUS "Using capnp=${USE_CAPNP}: ${CAPNP_LIBRARIES}") diff --git a/cmake/find/cassandra.cmake b/cmake/find/cassandra.cmake deleted file mode 100644 index 7fcbdbb90a5..00000000000 --- a/cmake/find/cassandra.cmake +++ /dev/null @@ -1,34 +0,0 @@ -if (MISSING_INTERNAL_LIBUV_LIBRARY) - message (WARNING "Disabling cassandra due to missing libuv") - set (ENABLE_CASSANDRA OFF CACHE INTERNAL "") -endif() - -option(ENABLE_CASSANDRA "Enable Cassandra" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_CASSANDRA) - return() -endif() - -if (APPLE) - set(CMAKE_MACOSX_RPATH ON) -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cassandra") - message (ERROR "submodule contrib/cassandra is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal Cassandra") - set (USE_CASSANDRA 0) - return() -endif() - -set (USE_CASSANDRA 1) -set (CASSANDRA_INCLUDE_DIR - "${ClickHouse_SOURCE_DIR}/contrib/cassandra/include/") -if (MAKE_STATIC_LIBRARIES) - set (CASSANDRA_LIBRARY cassandra_static) -else() - set (CASSANDRA_LIBRARY cassandra) -endif() - -set (CASS_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/cassandra") - -message (STATUS "Using cassandra=${USE_CASSANDRA}: ${CASSANDRA_INCLUDE_DIR} : ${CASSANDRA_LIBRARY}") diff --git a/cmake/find/curl.cmake b/cmake/find/curl.cmake deleted file mode 100644 index 577b13698c2..00000000000 --- a/cmake/find/curl.cmake +++ /dev/null @@ -1,35 +0,0 @@ -option (ENABLE_CURL "Enable curl" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_CURL) - if (USE_INTERNAL_CURL) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal curl with ENABLE_CURL=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_CURL "Use internal curl library" ON) - -if (NOT USE_INTERNAL_CURL) - find_package (CURL) - if (NOT CURL_FOUND) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system curl") - endif() -endif() - -if (NOT CURL_FOUND) - set (USE_INTERNAL_CURL 1) - set (CURL_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/curl") - - # find_package(CURL) compatibility for the following packages that uses - # find_package(CURL)/include(FindCURL): - # - sentry-native - set (CURL_FOUND ON CACHE BOOL "") - set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "") - set (CURL_INCLUDE_DIR ${CURL_LIBRARY_DIR}/include CACHE PATH "") - set (CURL_INCLUDE_DIRS ${CURL_LIBRARY_DIR}/include CACHE PATH "") - set (CURL_LIBRARY curl CACHE STRING "") - set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "") - set (CURL_VERSION_STRING 7.67.0 CACHE STRING "") -endif () - -message (STATUS "Using curl: ${CURL_INCLUDE_DIRS} : ${CURL_LIBRARIES}") diff --git a/cmake/find/cxx.cmake b/cmake/find/cxx.cmake index d1f62f0ecea..7d93bf05fc7 100644 --- a/cmake/find/cxx.cmake +++ b/cmake/find/cxx.cmake @@ -1,71 +1,8 @@ -option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ON) - -if (NOT USE_LIBCXX) - if (USE_INTERNAL_LIBCXX_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libcxx with USE_LIBCXX=OFF") - endif() - - target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static - target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY}) - return() -endif() - -set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ON) - -option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi libraries instead of bundled" - ${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT}) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/src") - if (USE_INTERNAL_LIBCXX_LIBRARY) - message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx") - set(USE_INTERNAL_LIBCXX_LIBRARY 0) - endif() - set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT 0) - set(MISSING_INTERNAL_LIBCXX_LIBRARY 1) -endif() - set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_LIBCPP_DEBUG=0") # More checks in debug build. 
-if (NOT USE_INTERNAL_LIBCXX_LIBRARY) - find_library (LIBCXX_LIBRARY c++) - find_library (LIBCXXFS_LIBRARY c++fs) - find_library (LIBCXXABI_LIBRARY c++abi) +add_subdirectory(contrib/libcxxabi-cmake) +add_subdirectory(contrib/libcxx-cmake) - if(LIBCXX_LIBRARY AND LIBCXXABI_LIBRARY) # c++fs is now a part of the libc++ - set (HAVE_LIBCXX 1) - else () - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libcxx") - endif() +# Exception handling library is embedded into libcxxabi. - if(NOT LIBCXXFS_LIBRARY) - set(LIBCXXFS_LIBRARY ${LIBCXX_LIBRARY}) - endif() - - set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++") - - target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY}) -endif () - -if (NOT HAVE_LIBCXX AND NOT MISSING_INTERNAL_LIBCXX_LIBRARY) - set (LIBCXX_LIBRARY cxx) - set (LIBCXXABI_LIBRARY cxxabi) - add_subdirectory(contrib/libcxxabi-cmake) - add_subdirectory(contrib/libcxx-cmake) - - # Exception handling library is embedded into libcxxabi. - - set (HAVE_LIBCXX 1) - set(USE_INTERNAL_LIBCXX_LIBRARY 1) -endif () - -if (HAVE_LIBCXX) - target_link_libraries(global-libs INTERFACE ${LIBCXX_LIBRARY} ${LIBCXXABI_LIBRARY} ${LIBCXXFS_LIBRARY}) - - message (STATUS "Using libcxx: ${LIBCXX_LIBRARY}") - message (STATUS "Using libcxxfs: ${LIBCXXFS_LIBRARY}") - message (STATUS "Using libcxxabi: ${LIBCXXABI_LIBRARY}") -else() - target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static - target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY}) -endif() +target_link_libraries(global-libs INTERFACE cxx cxxabi) diff --git a/cmake/find/cyrus-sasl.cmake b/cmake/find/cyrus-sasl.cmake deleted file mode 100644 index f0c088995b0..00000000000 --- a/cmake/find/cyrus-sasl.cmake +++ /dev/null @@ -1,23 +0,0 @@ -if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5}) - set (DEFAULT_ENABLE_CYRUS_SASL 1) -else() - set (DEFAULT_ENABLE_CYRUS_SASL 0) -endif() - -OPTION(ENABLE_CYRUS_SASL "Enable cyrus-sasl" ${DEFAULT_ENABLE_CYRUS_SASL}) -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/README") - message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init") - set (ENABLE_CYRUS_SASL 0) -endif () - -if (ENABLE_CYRUS_SASL) - - set (USE_CYRUS_SASL 1) - set (CYRUS_SASL_LIBRARY sasl2) - - set (CYRUS_SASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/include") - - -endif () - -message (STATUS "Using cyrus-sasl: krb5=${USE_KRB5}: ${CYRUS_SASL_INCLUDE_DIR} : ${CYRUS_SASL_LIBRARY}") diff --git a/cmake/find/datasketches.cmake b/cmake/find/datasketches.cmake deleted file mode 100644 index 3d0bb1d1f95..00000000000 --- a/cmake/find/datasketches.cmake +++ /dev/null @@ -1,29 +0,0 @@ -option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES}) - -if (ENABLE_DATASKETCHES) - -option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt") - if (USE_INTERNAL_DATASKETCHES_LIBRARY) - message(WARNING "submodule contrib/datasketches-cpp is missing. 
to fix try run: \n git submodule update --init") - endif() - set(MISSING_INTERNAL_DATASKETCHES_LIBRARY 1) - set(USE_INTERNAL_DATASKETCHES_LIBRARY 0) -endif() - -if (USE_INTERNAL_DATASKETCHES_LIBRARY) - set(DATASKETCHES_LIBRARY theta) - set(DATASKETCHES_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/common/include" "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/include") -elseif (NOT MISSING_INTERNAL_DATASKETCHES_LIBRARY) - find_library(DATASKETCHES_LIBRARY theta) - find_path(DATASKETCHES_INCLUDE_DIR NAMES theta_sketch.hpp PATHS ${DATASKETCHES_INCLUDE_PATHS}) -endif() - -if (DATASKETCHES_LIBRARY AND DATASKETCHES_INCLUDE_DIR) - set(USE_DATASKETCHES 1) -endif() - -endif() - -message (STATUS "Using datasketches=${USE_DATASKETCHES}: ${DATASKETCHES_INCLUDE_DIR} : ${DATASKETCHES_LIBRARY}") diff --git a/cmake/find/fast_float.cmake b/cmake/find/fast_float.cmake deleted file mode 100644 index 3e8b7cc5280..00000000000 --- a/cmake/find/fast_float.cmake +++ /dev/null @@ -1,6 +0,0 @@ -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/fast_float/fast_float.h") - message (FATAL_ERROR "submodule contrib/fast_float is missing. to fix try run: \n git submodule update --init") -endif () - -set(FAST_FLOAT_LIBRARY fast_float) -set(FAST_FLOAT_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/") diff --git a/cmake/find/fastops.cmake b/cmake/find/fastops.cmake deleted file mode 100644 index 72426eb5912..00000000000 --- a/cmake/find/fastops.cmake +++ /dev/null @@ -1,24 +0,0 @@ -if(ARCH_AMD64 AND NOT OS_FREEBSD AND NOT OS_DARWIN) - option(ENABLE_FASTOPS "Enable fast vectorized mathematical functions library by Mikhail Parakhin" ${ENABLE_LIBRARIES}) -elseif(ENABLE_FASTOPS) - message (${RECONFIGURE_MESSAGE_LEVEL} "Fastops library is supported on x86_64 only, and not FreeBSD or Darwin") -endif() - -if(NOT ENABLE_FASTOPS) - set(USE_FASTOPS 0) - return() -endif() - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fastops/fastops/fastops.h") - message(WARNING "submodule contrib/fastops is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal fastops library") - set(MISSING_INTERNAL_FASTOPS_LIBRARY 1) -endif() - -if(NOT MISSING_INTERNAL_FASTOPS_LIBRARY) - set(USE_FASTOPS 1) - set(FASTOPS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/fastops/) - set(FASTOPS_LIBRARY fastops) -endif() - -message(STATUS "Using fastops=${USE_FASTOPS}: ${FASTOPS_INCLUDE_DIR} : ${FASTOPS_LIBRARY}") diff --git a/cmake/find/filelog.cmake b/cmake/find/filelog.cmake deleted file mode 100644 index f545ee9d0ed..00000000000 --- a/cmake/find/filelog.cmake +++ /dev/null @@ -1,8 +0,0 @@ -# StorageFileLog only support Linux platform -if (OS_LINUX) - set (USE_FILELOG 1) - message (STATUS "Using StorageFileLog = 1") -else() - message(STATUS "StorageFileLog is only supported on Linux") -endif () - diff --git a/cmake/find/gperf.cmake b/cmake/find/gperf.cmake deleted file mode 100644 index 9b806598c57..00000000000 --- a/cmake/find/gperf.cmake +++ /dev/null @@ -1,16 +0,0 @@ -if(NOT DEFINED ENABLE_GPERF OR ENABLE_GPERF) - # Check if gperf was installed - find_program(GPERF gperf) - if(GPERF) - option(ENABLE_GPERF "Use gperf function hash generator tool" ${ENABLE_LIBRARIES}) - endif() -endif() - -if (ENABLE_GPERF) - if(NOT GPERF) - message(FATAL_ERROR "Could not find the program gperf") - endif() - set(USE_GPERF 1) -endif() - -message(STATUS "Using gperf=${USE_GPERF}: ${GPERF}") diff --git a/cmake/find/grpc.cmake b/cmake/find/grpc.cmake deleted file mode 100644 index 92a85b0df04..00000000000 --- a/cmake/find/grpc.cmake +++ /dev/null @@ -1,72 +0,0 @@ -# disable grpc due to conflicts of abseil (required by grpc) dynamic annotations with libtsan.a -if (SANITIZE STREQUAL "thread" AND COMPILER_GCC) - set(ENABLE_GRPC_DEFAULT OFF) -else() - set(ENABLE_GRPC_DEFAULT ${ENABLE_LIBRARIES}) -endif() - -option(ENABLE_GRPC "Use gRPC" ${ENABLE_GRPC_DEFAULT}) - -if(NOT ENABLE_GRPC) - if(USE_INTERNAL_GRPC_LIBRARY) - message(${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF") - endif() - return() -endif() - -if(NOT USE_PROTOBUF) - message(WARNING "Cannot use gRPC library without protobuf") -endif() - -# Normally we use the internal gRPC framework. -# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case. -# The external gRPC framework can be installed in the system by running -# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc -option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt") - if(USE_INTERNAL_GRPC_LIBRARY) - message(WARNING "submodule contrib/grpc is missing. 
to fix try run: \n git submodule update --init") - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc") - set(USE_INTERNAL_GRPC_LIBRARY 0) - endif() - set(MISSING_INTERNAL_GRPC_LIBRARY 1) -endif() - -if(USE_SSL) - set(gRPC_USE_UNSECURE_LIBRARIES FALSE) -else() - set(gRPC_USE_UNSECURE_LIBRARIES TRUE) -endif() - -if(NOT USE_INTERNAL_GRPC_LIBRARY) - find_package(gRPC) - if(NOT gRPC_INCLUDE_DIRS OR NOT gRPC_LIBRARIES) - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC library") - set(EXTERNAL_GRPC_LIBRARY_FOUND 0) - elseif(NOT gRPC_CPP_PLUGIN) - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system grpc_cpp_plugin") - set(EXTERNAL_GRPC_LIBRARY_FOUND 0) - else() - set(EXTERNAL_GRPC_LIBRARY_FOUND 1) - set(USE_GRPC 1) - endif() -endif() - -if(NOT EXTERNAL_GRPC_LIBRARY_FOUND AND NOT MISSING_INTERNAL_GRPC_LIBRARY) - set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include") - if(gRPC_USE_UNSECURE_LIBRARIES) - set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure) - else() - set(gRPC_LIBRARIES grpc grpc++) - endif() - set(gRPC_CPP_PLUGIN $) - set(gRPC_PYTHON_PLUGIN $) - - include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake") - - set(USE_INTERNAL_GRPC_LIBRARY 1) - set(USE_GRPC 1) -endif() - -message(STATUS "Using gRPC=${USE_GRPC}: ${gRPC_INCLUDE_DIRS} : ${gRPC_LIBRARIES} : ${gRPC_CPP_PLUGIN}") diff --git a/cmake/find/gtest.cmake b/cmake/find/gtest.cmake deleted file mode 100644 index 935744bcbd1..00000000000 --- a/cmake/find/gtest.cmake +++ /dev/null @@ -1,40 +0,0 @@ -# included only if ENABLE_TESTS=1 - -option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt") - if (USE_INTERNAL_GTEST_LIBRARY) - message (WARNING "submodule contrib/googletest is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gtest") - set (USE_INTERNAL_GTEST_LIBRARY 0) - endif () - - set (MISSING_INTERNAL_GTEST_LIBRARY 1) -endif () - -if(NOT USE_INTERNAL_GTEST_LIBRARY) - # TODO: autodetect of GTEST_SRC_DIR by EXISTS /usr/src/googletest/CMakeLists.txt - if(NOT GTEST_SRC_DIR) - find_package(GTest) - if (NOT GTEST_INCLUDE_DIRS) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system Google Test") - endif() - endif() -endif() - -if (NOT GTEST_SRC_DIR AND NOT GTEST_INCLUDE_DIRS AND NOT MISSING_INTERNAL_GTEST_LIBRARY) - set (USE_INTERNAL_GTEST_LIBRARY 1) - set (GTEST_MAIN_LIBRARIES gtest_main) - set (GTEST_LIBRARIES gtest) - set (GTEST_BOTH_LIBRARIES ${GTEST_MAIN_LIBRARIES} ${GTEST_LIBRARIES}) - set (GTEST_INCLUDE_DIRS ${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest) -elseif(USE_INTERNAL_GTEST_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Wouldn't use internal Google Test library") - set (USE_INTERNAL_GTEST_LIBRARY 0) -endif () - -if((GTEST_INCLUDE_DIRS AND GTEST_BOTH_LIBRARIES) OR GTEST_SRC_DIR) - set(USE_GTEST 1) -endif() - -message (STATUS "Using gtest=${USE_GTEST}: ${GTEST_INCLUDE_DIRS} : ${GTEST_BOTH_LIBRARIES} : ${GTEST_SRC_DIR}") diff --git a/cmake/find/h3.cmake b/cmake/find/h3.cmake deleted file mode 100644 index e692b431e90..00000000000 --- a/cmake/find/h3.cmake +++ /dev/null @@ -1,39 +0,0 @@ -option (ENABLE_H3 "Enable H3" ${ENABLE_LIBRARIES}) -if(NOT ENABLE_H3) - if(USE_INTERNAL_H3_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal h3 library with ENABLE_H3=OFF") - endif () - return() -endif() - -option(USE_INTERNAL_H3_LIBRARY "Set to FALSE to use system h3 library instead of bundled" - ON) # we are not aware of any distribution that provides h3 package - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include/h3Index.h") - if(USE_INTERNAL_H3_LIBRARY) - message(WARNING "submodule contrib/h3 is missing. 
to fix try run: \n git submodule update --init") - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal h3 library") - set(USE_INTERNAL_H3_LIBRARY 0) - endif() - set(MISSING_INTERNAL_H3_LIBRARY 1) -endif() - -if(NOT USE_INTERNAL_H3_LIBRARY) - find_library(H3_LIBRARY h3) - find_path(H3_INCLUDE_DIR NAMES h3/h3api.h PATHS ${H3_INCLUDE_PATHS}) - - if(NOT H3_LIBRARY OR NOT H3_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system h3 library") - endif() -endif() - -if (H3_LIBRARY AND H3_INCLUDE_DIR) - set (USE_H3 1) -elseif(NOT MISSING_INTERNAL_H3_LIBRARY) - set (H3_LIBRARY h3) - set (H3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include") - set (USE_H3 1) - set (USE_INTERNAL_H3_LIBRARY 1) -endif() - -message (STATUS "Using h3=${USE_H3}: ${H3_INCLUDE_DIR} : ${H3_LIBRARY}") diff --git a/cmake/find/hdfs3.cmake b/cmake/find/hdfs3.cmake deleted file mode 100644 index aac6b99dfa2..00000000000 --- a/cmake/find/hdfs3.cmake +++ /dev/null @@ -1,45 +0,0 @@ -if(NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE AND USE_PROTOBUF AND NOT ARCH_PPC64LE) - option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES}) -elseif(ENABLE_HDFS OR USE_INTERNAL_HDFS3_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration") -endif() - -if(NOT ENABLE_HDFS) - if(USE_INTERNAL_HDFS3_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library with ENABLE_HDFS3=OFF") - endif() - return() -endif() - -option(USE_INTERNAL_HDFS3_LIBRARY "Set to FALSE to use system HDFS3 instead of bundled (experimental - set to OFF on your own risk)" - ON) # We don't know any linux distribution with package for it - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include/hdfs/hdfs.h") - if(USE_INTERNAL_HDFS3_LIBRARY) - message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library") - set(USE_INTERNAL_HDFS3_LIBRARY 0) - endif() - set(MISSING_INTERNAL_HDFS3_LIBRARY 1) -endif() - -if(NOT USE_INTERNAL_HDFS3_LIBRARY) - find_library(HDFS3_LIBRARY hdfs3) - find_path(HDFS3_INCLUDE_DIR NAMES hdfs/hdfs.h PATHS ${HDFS3_INCLUDE_PATHS}) - if(NOT HDFS3_LIBRARY OR NOT HDFS3_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find system HDFS3 library") - endif() -endif() - -if(HDFS3_LIBRARY AND HDFS3_INCLUDE_DIR) - set(USE_HDFS 1) -elseif(NOT MISSING_INTERNAL_HDFS3_LIBRARY AND LIBGSASL_LIBRARY AND LIBXML2_LIBRARIES) - set(HDFS3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include") - set(HDFS3_LIBRARY hdfs3) - set(USE_INTERNAL_HDFS3_LIBRARY 1) - set(USE_HDFS 1) -else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannout enable HDFS3") -endif() - -message(STATUS "Using hdfs3=${USE_HDFS}: ${HDFS3_INCLUDE_DIR} : ${HDFS3_LIBRARY}") diff --git a/cmake/find/hive-metastore.cmake b/cmake/find/hive-metastore.cmake deleted file mode 100644 index bc283cf8bd2..00000000000 --- a/cmake/find/hive-metastore.cmake +++ /dev/null @@ -1,26 +0,0 @@ -option(ENABLE_HIVE "Enable Hive" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_HIVE) - message("Hive disabled") - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/hive-metastore") - message(WARNING "submodule contrib/hive-metastore is missing. 
to fix try run: \n git submodule update --init") - set(USE_HIVE 0) -elseif (NOT USE_THRIFT) - message(WARNING "Thrift is not found, which is needed by Hive") - set(USE_HIVE 0) -elseif (NOT USE_HDFS) - message(WARNING "HDFS is not found, which is needed by Hive") - set(USE_HIVE 0) -elseif (NOT USE_ORC OR NOT USE_ARROW OR NOT USE_PARQUET) - message(WARNING "ORC/Arrow/Parquet is not found, which are needed by Hive") - set(USE_HIVE 0) -else() - set(USE_HIVE 1) - set(HIVE_METASTORE_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/hive-metastore) - set(HIVE_METASTORE_LIBRARY hivemetastore) -endif() - -message (STATUS "Using_Hive=${USE_HIVE}: ${HIVE_METASTORE_INCLUDE_DIR} : ${HIVE_METASTORE_LIBRARY}") diff --git a/cmake/find/icu.cmake b/cmake/find/icu.cmake deleted file mode 100644 index 5ba25e93875..00000000000 --- a/cmake/find/icu.cmake +++ /dev/null @@ -1,51 +0,0 @@ -if (OS_LINUX) - option(ENABLE_ICU "Enable ICU" ${ENABLE_LIBRARIES}) -else () - option(ENABLE_ICU "Enable ICU" 0) -endif () - -if (NOT ENABLE_ICU) - if(USE_INTERNAL_ICU_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal icu library with ENABLE_ICU=OFF") - endif() - message(STATUS "Build without ICU (support for collations and charset conversion functions will be disabled)") - return() -endif() - -option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE") - if (USE_INTERNAL_ICU_LIBRARY) - message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ICU") - set (USE_INTERNAL_ICU_LIBRARY 0) - endif () - set (MISSING_INTERNAL_ICU_LIBRARY 1) -endif () - -if(NOT USE_INTERNAL_ICU_LIBRARY) - if (APPLE) - set(ICU_ROOT "/usr/local/opt/icu4c" CACHE STRING "") - endif() - find_package(ICU COMPONENTS i18n uc data) # TODO: remove Modules/FindICU.cmake after cmake 3.7 - #set (ICU_LIBRARIES ${ICU_I18N_LIBRARY} ${ICU_UC_LIBRARY} ${ICU_DATA_LIBRARY} CACHE STRING "") - if(ICU_FOUND) - set(USE_ICU 1) - else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ICU") - endif() -endif() - -if (ICU_LIBRARY AND ICU_INCLUDE_DIR) - set (USE_ICU 1) -elseif (NOT MISSING_INTERNAL_ICU_LIBRARY) - set (USE_INTERNAL_ICU_LIBRARY 1) - set (ICU_LIBRARIES icui18n icuuc icudata) - set (USE_ICU 1) -endif () - -if(USE_ICU) - message(STATUS "Using icu=${USE_ICU}: ${ICU_INCLUDE_DIR} : ${ICU_LIBRARIES}") -else() - message(STATUS "Build without ICU (support for collations and charset conversion functions will be disabled)") -endif() diff --git a/cmake/find/krb5.cmake b/cmake/find/krb5.cmake deleted file mode 100644 index 24cc51325dc..00000000000 --- a/cmake/find/krb5.cmake +++ /dev/null @@ -1,25 +0,0 @@ -OPTION(ENABLE_KRB5 "Enable krb5" ${ENABLE_LIBRARIES}) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/krb5/README") - message (WARNING "submodule contrib/krb5 is missing. 
to fix try run: \n git submodule update --init") - set (ENABLE_KRB5 0) -endif () - -if (NOT CMAKE_SYSTEM_NAME MATCHES "Linux" AND NOT (CMAKE_SYSTEM_NAME MATCHES "Darwin" AND NOT CMAKE_CROSSCOMPILING)) - message (WARNING "krb5 disabled in non-Linux and non-native-Darwin environments") - set (ENABLE_KRB5 0) -endif () - -if (ENABLE_KRB5) - - set (USE_KRB5 1) - set (KRB5_LIBRARY krb5) - - set (KRB5_INCLUDE_DIR - "${ClickHouse_SOURCE_DIR}/contrib/krb5/src/include" - "${ClickHouse_BINARY_DIR}/contrib/krb5-cmake/include" - ) - -endif () - -message (STATUS "Using krb5=${USE_KRB5}: ${KRB5_INCLUDE_DIR} : ${KRB5_LIBRARY}") diff --git a/cmake/find/ldap.cmake b/cmake/find/ldap.cmake deleted file mode 100644 index d0d1e54bfec..00000000000 --- a/cmake/find/ldap.cmake +++ /dev/null @@ -1,100 +0,0 @@ -option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_LDAP) - if(USE_INTERNAL_LDAP_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal LDAP library with ENABLE_LDAP=OFF") - endif () - return() -endif() - -option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README") - if (USE_INTERNAL_LDAP_LIBRARY) - message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal LDAP library") - endif () - - set (USE_INTERNAL_LDAP_LIBRARY 0) - set (MISSING_INTERNAL_LDAP_LIBRARY 1) -endif () - -set (OPENLDAP_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES}) -set (OPENLDAP_USE_REENTRANT_LIBS 1) - -if (NOT USE_INTERNAL_LDAP_LIBRARY) - if (OPENLDAP_USE_STATIC_LIBS) - message (WARNING "Unable to use external static OpenLDAP libraries, falling back to the bundled version.") - message (${RECONFIGURE_MESSAGE_LEVEL} "Unable to use external OpenLDAP") - set (USE_INTERNAL_LDAP_LIBRARY 1) - else () - if (APPLE AND NOT OPENLDAP_ROOT_DIR) - set (OPENLDAP_ROOT_DIR "/usr/local/opt/openldap") - endif () - - find_package (OpenLDAP) - - if (NOT OPENLDAP_FOUND) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system OpenLDAP") - endif() - endif () -endif () - -if (NOT OPENLDAP_FOUND AND NOT MISSING_INTERNAL_LDAP_LIBRARY) - string (TOLOWER "${CMAKE_SYSTEM_NAME}" _system_name) - string (TOLOWER "${CMAKE_SYSTEM_PROCESSOR}" _system_processor) - - if ( - "${_system_processor}" STREQUAL "amd64" OR - "${_system_processor}" STREQUAL "x64" - ) - set (_system_processor "x86_64") - elseif ( - "${_system_processor}" STREQUAL "arm64" - ) - set (_system_processor "aarch64") - endif () - - if ( - ( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "x86_64" ) OR - ( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "aarch64" ) OR - ( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "ppc64le" ) OR - ( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "x86_64" ) OR - ( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "aarch64" ) OR - ( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "x86_64" ) OR - ( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "aarch64" ) - ) - set (_ldap_supported_platform TRUE) - endif () - - if (NOT _ldap_supported_platform) - message (WARNING "LDAP support using the bundled library is not implemented for ${CMAKE_SYSTEM_NAME} ${CMAKE_SYSTEM_PROCESSOR} platform.") - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support") - elseif 
(NOT USE_SSL) - message (WARNING "LDAP support using the bundled library is not possible if SSL is not used.") - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support") - else () - set (USE_INTERNAL_LDAP_LIBRARY 1) - set (OPENLDAP_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/openldap") - set (OPENLDAP_INCLUDE_DIRS - "${ClickHouse_SOURCE_DIR}/contrib/openldap-cmake/${_system_name}_${_system_processor}/include" - "${ClickHouse_SOURCE_DIR}/contrib/openldap/include" - ) - # Below, 'ldap'/'ldap_r' and 'lber' will be resolved to - # the targets defined in contrib/openldap-cmake/CMakeLists.txt - if (OPENLDAP_USE_REENTRANT_LIBS) - set (OPENLDAP_LDAP_LIBRARY "ldap_r") - else () - set (OPENLDAP_LDAP_LIBRARY "ldap") - endif() - set (OPENLDAP_LBER_LIBRARY "lber") - set (OPENLDAP_LIBRARIES ${OPENLDAP_LDAP_LIBRARY} ${OPENLDAP_LBER_LIBRARY}) - set (OPENLDAP_FOUND 1) - endif () -endif () - -if (OPENLDAP_FOUND) - set (USE_LDAP 1) -endif () - -message (STATUS "Using ldap=${USE_LDAP}: ${OPENLDAP_INCLUDE_DIRS} : ${OPENLDAP_LIBRARIES}") diff --git a/cmake/find/libgsasl.cmake b/cmake/find/libgsasl.cmake deleted file mode 100644 index d4e1ebce629..00000000000 --- a/cmake/find/libgsasl.cmake +++ /dev/null @@ -1,40 +0,0 @@ -option(ENABLE_GSASL_LIBRARY "Enable gsasl library" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_GSASL_LIBRARY) - if(USE_INTERNAL_LIBGSASL_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libgsasl library with ENABLE_GSASL_LIBRARY=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h") - if (USE_INTERNAL_LIBGSASL_LIBRARY) - message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libgsasl") - set (USE_INTERNAL_LIBGSASL_LIBRARY 0) - endif () - set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1) -endif () - -if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) - find_library (LIBGSASL_LIBRARY gsasl) - find_path (LIBGSASL_INCLUDE_DIR NAMES gsasl.h PATHS ${LIBGSASL_INCLUDE_PATHS}) - if (NOT LIBGSASL_LIBRARY OR NOT LIBGSASL_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libgsasl") - endif () -endif () - -if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR) -elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY) - set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include") - set (USE_INTERNAL_LIBGSASL_LIBRARY 1) - set (LIBGSASL_LIBRARY gsasl) -endif () - -if(LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR) - set (USE_LIBGSASL 1) -endif() - -message (STATUS "Using libgsasl=${USE_LIBGSASL}: ${LIBGSASL_INCLUDE_DIR} : ${LIBGSASL_LIBRARY}") diff --git a/cmake/find/libpqxx.cmake b/cmake/find/libpqxx.cmake deleted file mode 100644 index 68dddffde70..00000000000 --- a/cmake/find/libpqxx.cmake +++ /dev/null @@ -1,31 +0,0 @@ -option(ENABLE_LIBPQXX "Enalbe libpqxx" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_LIBPQXX) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/src") - message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library") - set (USE_LIBPQXX 0) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpq/include") - message (ERROR "submodule contrib/libpq is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpq needed for libpqxx") - set (USE_LIBPQXX 0) - return() -endif() - -if (NOT USE_INTERNAL_SSL_LIBRARY) - set (USE_LIBPQXX 0) -else () -set (USE_LIBPQXX 1) -set (LIBPQXX_LIBRARY libpqxx) -set (LIBPQ_LIBRARY libpq) -set (LIBPQXX_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/include") -set (LIBPQ_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpq") -message (STATUS "Using libpqxx=${USE_LIBPQXX}: ${LIBPQXX_INCLUDE_DIR} : ${LIBPQXX_LIBRARY}") -message (STATUS "Using libpq: ${LIBPQ_ROOT_DIR} : ${LIBPQ_INCLUDE_DIR} : ${LIBPQ_LIBRARY}") -endif() diff --git a/cmake/find/libprotobuf-mutator.cmake b/cmake/find/libprotobuf-mutator.cmake deleted file mode 100644 index a308db67c8b..00000000000 --- a/cmake/find/libprotobuf-mutator.cmake +++ /dev/null @@ -1,11 +0,0 @@ -option(USE_LIBPROTOBUF_MUTATOR "Enable libprotobuf-mutator" ${ENABLE_FUZZING}) - -if (NOT USE_LIBPROTOBUF_MUTATOR) - return() -endif() - -set(LibProtobufMutator_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libprotobuf-mutator") - -if (NOT EXISTS "${LibProtobufMutator_SOURCE_DIR}/README.md") - message (ERROR "submodule contrib/libprotobuf-mutator is missing. to fix try run: \n git submodule update --init") -endif() diff --git a/cmake/find/libuv.cmake b/cmake/find/libuv.cmake deleted file mode 100644 index c94dfd50b76..00000000000 --- a/cmake/find/libuv.cmake +++ /dev/null @@ -1,22 +0,0 @@ -if (OS_DARWIN AND COMPILER_GCC) - message (WARNING "libuv cannot be built with GCC in macOS due to a bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93082") - SET(MISSING_INTERNAL_LIBUV_LIBRARY 1) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libuv") - message (WARNING "submodule contrib/libuv is missing. to fix try run: \n git submodule update --init") - SET(MISSING_INTERNAL_LIBUV_LIBRARY 1) - return() -endif() - -if (MAKE_STATIC_LIBRARIES) - set (LIBUV_LIBRARY uv_a) -else() - set (LIBUV_LIBRARY uv) -endif() - -set (LIBUV_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/libuv") -set (LIBUV_INCLUDE_DIR "${LIBUV_ROOT_DIR}/include") - -message (STATUS "Using libuv: ${LIBUV_ROOT_DIR} : ${LIBUV_LIBRARY}") diff --git a/cmake/find/libxml2.cmake b/cmake/find/libxml2.cmake deleted file mode 100644 index e9fe7780d39..00000000000 --- a/cmake/find/libxml2.cmake +++ /dev/null @@ -1,34 +0,0 @@ -option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h") - if (USE_INTERNAL_LIBXML2_LIBRARY) - message (WARNING "submodule contrib/libxml2 is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libxml") - set (USE_INTERNAL_LIBXML2_LIBRARY 0) - endif () - set (MISSING_INTERNAL_LIBXML2_LIBRARY 1) -endif () - -if (NOT USE_INTERNAL_LIBXML2_LIBRARY) - find_package (LibXml2) - #find_library (LIBXML2_LIBRARY libxml2) - #find_path (LIBXML2_INCLUDE_DIR NAMES libxml.h PATHS ${LIBXML2_INCLUDE_PATHS}) - - if (NOT LIBXML2_LIBRARY OR NOT LIBXML2_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libxml2") - endif () - - if (USE_STATIC_LIBRARIES) - find_package(LibLZMA) - set (LIBXML2_LIBRARIES ${LIBXML2_LIBRARIES} ${LIBLZMA_LIBRARIES}) - endif () -endif () - -if (LIBXML2_LIBRARY AND LIBXML2_INCLUDE_DIR) -elseif (NOT MISSING_INTERNAL_LIBXML2_LIBRARY) - set (LIBXML2_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libxml2/include ${ClickHouse_SOURCE_DIR}/contrib/libxml2-cmake/linux_x86_64/include) - set (USE_INTERNAL_LIBXML2_LIBRARY 1) - set (LIBXML2_LIBRARIES libxml2) -endif () - -message (STATUS "Using libxml2: ${LIBXML2_INCLUDE_DIR} : ${LIBXML2_LIBRARIES}") diff --git a/cmake/find/llvm.cmake b/cmake/find/llvm.cmake deleted file mode 100644 index ece5d5434a0..00000000000 --- a/cmake/find/llvm.cmake +++ /dev/null @@ -1,79 +0,0 @@ -if (APPLE OR NOT ARCH_AMD64 OR SANITIZE STREQUAL "undefined") - set (ENABLE_EMBEDDED_COMPILER_DEFAULT OFF) -else() - set (ENABLE_EMBEDDED_COMPILER_DEFAULT ON) -endif() - -option (ENABLE_EMBEDDED_COMPILER "Enable support for 'compile_expressions' option for query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT}) - -if (NOT ENABLE_EMBEDDED_COMPILER) - set (USE_EMBEDDED_COMPILER 0) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/CMakeLists.txt") - message (${RECONFIGURE_MESSAGE_LEVEL} "submodule /contrib/llvm is missing. to fix try run: \n git submodule update --init") -endif () - -set (USE_EMBEDDED_COMPILER 1) - -set (LLVM_FOUND 1) -set (LLVM_VERSION "12.0.0bundled") -set (LLVM_INCLUDE_DIRS - "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/include" - "${ClickHouse_BINARY_DIR}/contrib/llvm/llvm/include" -) -set (LLVM_LIBRARY_DIRS "${ClickHouse_BINARY_DIR}/contrib/llvm/llvm") - -message(STATUS "LLVM include Directory: ${LLVM_INCLUDE_DIRS}") -message(STATUS "LLVM library Directory: ${LLVM_LIBRARY_DIRS}") -message(STATUS "LLVM C++ compiler flags: ${LLVM_CXXFLAGS}") - -# This list was generated by listing all LLVM libraries, compiling the binary and removing all libraries while it still compiles. 
-set (REQUIRED_LLVM_LIBRARIES -LLVMExecutionEngine -LLVMRuntimeDyld -LLVMX86CodeGen -LLVMX86Desc -LLVMX86Info -LLVMAsmPrinter -LLVMDebugInfoDWARF -LLVMGlobalISel -LLVMSelectionDAG -LLVMMCDisassembler -LLVMPasses -LLVMCodeGen -LLVMipo -LLVMBitWriter -LLVMInstrumentation -LLVMScalarOpts -LLVMAggressiveInstCombine -LLVMInstCombine -LLVMVectorize -LLVMTransformUtils -LLVMTarget -LLVMAnalysis -LLVMProfileData -LLVMObject -LLVMBitReader -LLVMCore -LLVMRemarks -LLVMBitstreamReader -LLVMMCParser -LLVMMC -LLVMBinaryFormat -LLVMDebugInfoCodeView -LLVMSupport -LLVMDemangle -) - -#function(llvm_libs_all REQUIRED_LLVM_LIBRARIES) -# llvm_map_components_to_libnames (result all) -# if (USE_STATIC_LIBRARIES OR NOT "LLVM" IN_LIST result) -# list (REMOVE_ITEM result "LTO" "LLVM") -# else() -# set (result "LLVM") -# endif () -# list (APPEND result ${CMAKE_DL_LIBS} ${ZLIB_LIBRARIES}) -# set (${REQUIRED_LLVM_LIBRARIES} ${result} PARENT_SCOPE) -#endfunction() diff --git a/cmake/find/ltdl.cmake b/cmake/find/ltdl.cmake deleted file mode 100644 index b48a3630222..00000000000 --- a/cmake/find/ltdl.cmake +++ /dev/null @@ -1,5 +0,0 @@ -if (ENABLE_ODBC AND NOT USE_INTERNAL_ODBC_LIBRARY) - set (LTDL_PATHS "/usr/local/opt/libtool/lib") - find_library (LTDL_LIBRARY ltdl PATHS ${LTDL_PATHS} REQUIRED) - message (STATUS "Using ltdl: ${LTDL_LIBRARY}") -endif () diff --git a/cmake/find/miniselect.cmake b/cmake/find/miniselect.cmake deleted file mode 100644 index 0a50c9bf4a8..00000000000 --- a/cmake/find/miniselect.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set(MINISELECT_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/miniselect/include) -message(STATUS "Using miniselect: ${MINISELECT_INCLUDE_DIR}") diff --git a/cmake/find/msgpack.cmake b/cmake/find/msgpack.cmake deleted file mode 100644 index ac52740c774..00000000000 --- a/cmake/find/msgpack.cmake +++ /dev/null @@ -1,37 +0,0 @@ -option (ENABLE_MSGPACK "Enable msgpack library" ${ENABLE_LIBRARIES}) - -if(NOT ENABLE_MSGPACK) - if(USE_INTERNAL_MSGPACK_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack with ENABLE_MSGPACK=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp") - if(USE_INTERNAL_MSGPACK_LIBRARY) - message(WARNING "Submodule contrib/msgpack-c is missing. 
To fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack") - set(USE_INTERNAL_MSGPACK_LIBRARY 0) - endif() - set(MISSING_INTERNAL_MSGPACK_LIBRARY 1) -endif() - -if(NOT USE_INTERNAL_MSGPACK_LIBRARY) - find_path(MSGPACK_INCLUDE_DIR NAMES msgpack.hpp PATHS ${MSGPACK_INCLUDE_PATHS}) - if(NOT MSGPACK_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system msgpack") - endif() -endif() - -if(NOT MSGPACK_INCLUDE_DIR AND NOT MISSING_INTERNAL_MSGPACK_LIBRARY) - set(MSGPACK_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include") - set(USE_INTERNAL_MSGPACK_LIBRARY 1) -endif() - -if (MSGPACK_INCLUDE_DIR) - set(USE_MSGPACK 1) -endif() - -message(STATUS "Using msgpack=${USE_MSGPACK}: ${MSGPACK_INCLUDE_DIR}") diff --git a/cmake/find/mysqlclient.cmake b/cmake/find/mysqlclient.cmake deleted file mode 100644 index 746775410cb..00000000000 --- a/cmake/find/mysqlclient.cmake +++ /dev/null @@ -1,78 +0,0 @@ -if(OS_LINUX AND OPENSSL_FOUND) - option(ENABLE_MYSQL "Enable MySQL" ${ENABLE_LIBRARIES}) -else () - option(ENABLE_MYSQL "Enable MySQL" FALSE) -endif () - -if(NOT ENABLE_MYSQL) - if (USE_INTERNAL_MYSQL_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal mysql library with ENABLE_MYSQL=OFF") - endif () - message (STATUS "Build without mysqlclient (support for MYSQL dictionary source will be disabled)") - return() -endif() - -option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README") - if(USE_INTERNAL_MYSQL_LIBRARY) - message(WARNING "submodule contrib/mariadb-connector-c is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal mysql library") - set(USE_INTERNAL_MYSQL_LIBRARY 0) - endif() - set(MISSING_INTERNAL_MYSQL_LIBRARY 1) -endif() - -if (NOT USE_INTERNAL_MYSQL_LIBRARY) - set (MYSQL_LIB_PATHS - "/usr/local/opt/mysql/lib" - "/usr/local/lib" - "/usr/local/lib64" - "/usr/local/lib/mariadb" # macos brew mariadb-connector-c - "/usr/mysql/lib" - "/usr/mysql/lib64" - "/usr/lib" - "/usr/lib64" - "/lib" - "/lib64") - - set (MYSQL_INCLUDE_PATHS - "/usr/local/opt/mysql/include" - "/usr/mysql/include" - "/usr/local/include" - "/usr/include/mariadb" - "/usr/include/mysql" - "/usr/include") - - find_path (MYSQL_INCLUDE_DIR NAMES mysql.h mysql/mysql.h mariadb/mysql.h PATHS ${MYSQL_INCLUDE_PATHS} PATH_SUFFIXES mysql) - - if (USE_STATIC_LIBRARIES) - find_library (STATIC_MYSQLCLIENT_LIB NAMES mariadbclient mysqlclient PATHS ${MYSQL_LIB_PATHS} PATH_SUFFIXES mysql) - else () - find_library (MYSQLCLIENT_LIBRARIES NAMES mariadb mariadbclient mysqlclient PATHS ${MYSQL_LIB_PATHS} PATH_SUFFIXES mysql) - endif () - - if (MYSQL_INCLUDE_DIR AND (STATIC_MYSQLCLIENT_LIB OR MYSQLCLIENT_LIBRARIES)) - set (USE_MYSQL 1) - set (MYSQLXX_LIBRARY mysqlxx) - if (APPLE) - # /usr/local/include/mysql/mysql_com.h:1011:10: fatal error: mysql/udf_registration_types.h: No such file or directory - set(MYSQL_INCLUDE_DIR ${MYSQL_INCLUDE_DIR} ${MYSQL_INCLUDE_DIR}/mysql) - endif () - else () - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system mysql library") - endif () -endif () - -if (NOT USE_MYSQL AND NOT MISSING_INTERNAL_MYSQL_LIBRARY) - set (MYSQLCLIENT_LIBRARIES mariadbclient) - set (MYSQLXX_LIBRARY mysqlxx) - set (USE_MYSQL 1) - set (USE_INTERNAL_MYSQL_LIBRARY 1) -endif() - -if (USE_MYSQL) - message (STATUS "Using mysqlclient=${USE_MYSQL}: 
${MYSQL_INCLUDE_DIR} : ${MYSQLCLIENT_LIBRARIES}; staticlib=${STATIC_MYSQLCLIENT_LIB}") -else () - message (STATUS "Build without mysqlclient (support for MYSQL dictionary source will be disabled)") -endif () diff --git a/cmake/find/nanodbc.cmake b/cmake/find/nanodbc.cmake deleted file mode 100644 index 2fa60e71f55..00000000000 --- a/cmake/find/nanodbc.cmake +++ /dev/null @@ -1,16 +0,0 @@ -if (NOT ENABLE_ODBC) - return () -endif () - -if (NOT USE_INTERNAL_NANODBC_LIBRARY) - message (FATAL_ERROR "Only the bundled nanodbc library can be used") -endif () - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc") - message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init") -endif() - -set (NANODBC_LIBRARY nanodbc) -set (NANODBC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc") - -message (STATUS "Using nanodbc: ${NANODBC_INCLUDE_DIR} : ${NANODBC_LIBRARY}") diff --git a/cmake/find/nlp.cmake b/cmake/find/nlp.cmake deleted file mode 100644 index 4b9311c6685..00000000000 --- a/cmake/find/nlp.cmake +++ /dev/null @@ -1,39 +0,0 @@ -option(ENABLE_NLP "Enable NLP functions support" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_NLP) - - message (STATUS "NLP functions disabled") - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile") - message (WARNING "submodule contrib/libstemmer_c is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libstemmer_c library, NLP functions will be disabled") - set (USE_NLP 0) - return() -endif () - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/wnb") - message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled") - set (USE_NLP 0) - return() -endif () - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/lemmagen-c/README.md") - message (WARNING "submodule contrib/lemmagen-c is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal lemmagen-c library, NLP functions will be disabled") - set (USE_NLP 0) - return() -endif () - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cld2/README.md") - message (WARNING "submodule contrib/cld2 is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cld2 library, NLP functions will be disabled") - set (USE_NLP 0) - return() -endif () - -set (USE_NLP 1) - -message (STATUS "Using Libraries for NLP functions: contrib/wordnet-blast, contrib/libstemmer_c, contrib/lemmagen-c, contrib/cld2") diff --git a/cmake/find/nuraft.cmake b/cmake/find/nuraft.cmake deleted file mode 100644 index c19f6774e7d..00000000000 --- a/cmake/find/nuraft.cmake +++ /dev/null @@ -1,24 +0,0 @@ -option(ENABLE_NURAFT "Enable NuRaft" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_NURAFT) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/src") - message (WARNING "submodule contrib/NuRaft is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library") - set (USE_NURAFT 0) - return() -endif () - -if (NOT OS_FREEBSD) - set (USE_NURAFT 1) - set (NURAFT_LIBRARY nuraft) - - set (NURAFT_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/include") - - message (STATUS "Using NuRaft=${USE_NURAFT}: ${NURAFT_INCLUDE_DIR} : ${NURAFT_LIBRARY}") -else() - set (USE_NURAFT 0) - message (STATUS "Using internal NuRaft library on FreeBSD and Darwin is not supported") -endif() diff --git a/cmake/find/odbc.cmake b/cmake/find/odbc.cmake deleted file mode 100644 index 2f06cfed941..00000000000 --- a/cmake/find/odbc.cmake +++ /dev/null @@ -1,55 +0,0 @@ -option (ENABLE_ODBC "Enable ODBC library" ${ENABLE_LIBRARIES}) - -if (NOT OS_LINUX) - if (ENABLE_ODBC) - message(STATUS "ODBC is only supported on Linux") - endif() - set (ENABLE_ODBC OFF CACHE INTERNAL "") -endif () - -if (NOT ENABLE_ODBC) - if (USE_INTERNAL_ODBC_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal ODBC with ENABLE_ODBC=OFF") - endif() - - add_library (unixodbc INTERFACE) - target_compile_definitions (unixodbc INTERFACE USE_ODBC=0) - - message (STATUS "Not using unixodbc") - return() -endif() - -option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ON) - -if (NOT USE_INTERNAL_ODBC_LIBRARY) - find_library (LIBRARY_ODBC NAMES unixodbc odbc) - find_path (INCLUDE_ODBC sql.h) - - if(LIBRARY_ODBC AND INCLUDE_ODBC) - add_library (unixodbc INTERFACE) - set_target_properties (unixodbc PROPERTIES INTERFACE_LINK_LIBRARIES ${LIBRARY_ODBC}) - set_target_properties (unixodbc PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_ODBC}) - set_target_properties (unixodbc PROPERTIES INTERFACE_COMPILE_DEFINITIONS USE_ODBC=1) - - if (USE_STATIC_LIBRARIES) - find_library(LTDL_LIBRARY ltdl) - if (LTDL_LIBRARY) - target_link_libraries(unixodbc INTERFACE ${LTDL_LIBRARY}) - endif() - endif() - - set(EXTERNAL_ODBC_LIBRARY_FOUND 1) - message (STATUS "Found odbc: ${LIBRARY_ODBC}") - else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ODBC library") - set(EXTERNAL_ODBC_LIBRARY_FOUND 0) - endif() -endif() - -if (NOT EXTERNAL_ODBC_LIBRARY_FOUND) - set (USE_INTERNAL_ODBC_LIBRARY 1) -endif () - -set (USE_INTERNAL_NANODBC_LIBRARY 1) - -message (STATUS "Using unixodbc") diff --git a/cmake/find/orc.cmake b/cmake/find/orc.cmake deleted file mode 100644 index a5c3f57468a..00000000000 --- a/cmake/find/orc.cmake +++ /dev/null @@ -1,57 +0,0 @@ -option (ENABLE_ORC "Enable ORC" ${ENABLE_LIBRARIES}) - -if(NOT ENABLE_ORC) - if(USE_INTERNAL_ORC_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal ORC library with ENABLE_ORC=OFF") - endif() - return() -endif() - -if (USE_INTERNAL_PARQUET_LIBRARY) - option(USE_INTERNAL_ORC_LIBRARY "Set to FALSE to use system ORC instead of bundled (experimental set to OFF on your own risk)" - ON) -elseif(USE_INTERNAL_ORC_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Currently internal ORC can be build only with bundled Parquet") -endif() - -include(cmake/find/snappy.cmake) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include/orc/OrcFile.hh") - if(USE_INTERNAL_ORC_LIBRARY) - message(WARNING "submodule contrib/orc is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ORC") - set(USE_INTERNAL_ORC_LIBRARY 0) - endif() - set(MISSING_INTERNAL_ORC_LIBRARY 1) -endif () - -if (NOT USE_INTERNAL_ORC_LIBRARY) - find_package(orc) - if (NOT ORC_LIBRARY OR NOT ORC_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ORC") - endif () -endif () - -#if (USE_INTERNAL_ORC_LIBRARY) -#find_path(CYRUS_SASL_INCLUDE_DIR sasl/sasl.h) -#find_library(CYRUS_SASL_SHARED_LIB sasl2) -#if (NOT CYRUS_SASL_INCLUDE_DIR OR NOT CYRUS_SASL_SHARED_LIB) -# set(USE_ORC 0) -#endif() -#endif() - -if (ORC_LIBRARY AND ORC_INCLUDE_DIR) - set(USE_ORC 1) -elseif(NOT MISSING_INTERNAL_ORC_LIBRARY AND ARROW_LIBRARY AND SNAPPY_LIBRARY) # (LIBGSASL_LIBRARY AND LIBXML2_LIBRARY) - set(ORC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include") - set(ORC_LIBRARY orc) - set(USE_ORC 1) - set(USE_INTERNAL_ORC_LIBRARY 1) -else() - message (${RECONFIGURE_MESSAGE_LEVEL} - "Can't enable ORC support - missing dependencies. Missing internal orc=${MISSING_INTERNAL_ORC_LIBRARY}. " - "arrow=${ARROW_LIBRARY} snappy=${SNAPPY_LIBRARY}") - set(USE_INTERNAL_ORC_LIBRARY 0) -endif() - -message (STATUS "Using internal=${USE_INTERNAL_ORC_LIBRARY} orc=${USE_ORC}: ${ORC_INCLUDE_DIR} : ${ORC_LIBRARY}") diff --git a/cmake/find/parquet.cmake b/cmake/find/parquet.cmake deleted file mode 100644 index 48c2bb7babb..00000000000 --- a/cmake/find/parquet.cmake +++ /dev/null @@ -1,168 +0,0 @@ -if (Protobuf_PROTOC_EXECUTABLE) - option (ENABLE_PARQUET "Enable parquet" ${ENABLE_LIBRARIES}) -elseif(ENABLE_PARQUET OR USE_INTERNAL_PARQUET_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use parquet without protoc executable") -endif() - -if (NOT ENABLE_PARQUET) - if(USE_INTERNAL_PARQUET_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal parquet with ENABLE_PARQUET=OFF") - endif() - message(STATUS "Building without Parquet support") - return() -endif() - -if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory - option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ON) -elseif(USE_INTERNAL_PARQUET_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet is not supported on freebsd") -endif() - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/CMakeLists.txt") - if(USE_INTERNAL_PARQUET_LIBRARY) - message(WARNING "submodule contrib/arrow (required for Parquet) is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library") - set(USE_INTERNAL_PARQUET_LIBRARY 0) - endif() - set(MISSING_INTERNAL_PARQUET_LIBRARY 1) -endif() - -if (NOT SNAPPY_LIBRARY) - include(cmake/find/snappy.cmake) -endif() - -if(NOT USE_INTERNAL_PARQUET_LIBRARY) - find_package(Arrow) - find_package(Parquet) - find_library(UTF8_PROC_LIBRARY utf8proc) - find_package(BZip2) - - if(USE_STATIC_LIBRARIES) - find_library(ARROW_DEPS_LIBRARY arrow_bundled_dependencies) - - if (ARROW_DEPS_LIBRARY) - set(ARROW_IMPORT_OBJ_DIR "${CMAKE_CURRENT_BINARY_DIR}/contrib/arrow-cmake/imported-objects") - set(ARROW_OTHER_OBJS - "${ARROW_IMPORT_OBJ_DIR}/jemalloc.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/arena.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/background_thread.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/base.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/bin.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/bitmap.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/ckh.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/ctl.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/div.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/extent.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/extent_dss.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/extent_mmap.pic.o" - # skip hash - "${ARROW_IMPORT_OBJ_DIR}/hook.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/large.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/log.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/malloc_io.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/mutex.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/mutex_pool.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/nstime.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/pages.pic.o" - # skip prng - "${ARROW_IMPORT_OBJ_DIR}/prof.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/rtree.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/stats.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/sc.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/sz.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/tcache.pic.o" - # skip ticker - "${ARROW_IMPORT_OBJ_DIR}/tsd.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/test_hooks.pic.o" - "${ARROW_IMPORT_OBJ_DIR}/witness.pic.o" - ) - add_custom_command(OUTPUT ${ARROW_OTHER_OBJS} - COMMAND - mkdir -p "${ARROW_IMPORT_OBJ_DIR}" && - cd "${ARROW_IMPORT_OBJ_DIR}" && - "${CMAKE_AR}" x "${ARROW_DEPS_LIBRARY}" - ) - set_source_files_properties(jemalloc.pic.o PROPERTIES EXTERNAL_OBJECT true GENERATED true) - add_library(imported_arrow_deps STATIC ${ARROW_OTHER_OBJS}) - - set(ARROW_LIBRARY ${ARROW_STATIC_LIB} - imported_arrow_deps ${THRIFT_LIBRARY} ${UTF8_PROC_LIBRARY} ${BZIP2_LIBRARIES} ${SNAPPY_LIBRARY}) - else() - message(WARNING "Using external static Arrow does not always work. " - "Could not find arrow_bundled_dependencies.a. 
If compilation fails, " - "Try: -D\"USE_INTERNAL_PARQUET_LIBRARY\"=ON or -D\"ENABLE_PARQUET\"=OFF or " - "-D\"USE_STATIC_LIBRARIES\"=OFF") - set(ARROW_LIBRARY ${ARROW_STATIC_LIB}) - endif() - set(PARQUET_LIBRARY ${PARQUET_STATIC_LIB}) - else() - set(ARROW_LIBRARY ${ARROW_SHARED_LIB}) - set(PARQUET_LIBRARY ${PARQUET_SHARED_LIB}) - endif() - - if(ARROW_INCLUDE_DIR AND ARROW_LIBRARY AND PARQUET_INCLUDE_DIR AND PARQUET_LIBRARY AND THRIFT_LIBRARY AND UTF8_PROC_LIBRARY AND BZIP2_FOUND) - set(USE_PARQUET 1) - set(EXTERNAL_PARQUET_FOUND 1) - else() - message (${RECONFIGURE_MESSAGE_LEVEL} - "Can't find system parquet: arrow=${ARROW_INCLUDE_DIR}:${ARROW_LIBRARY} ;" - " parquet=${PARQUET_INCLUDE_DIR}:${PARQUET_LIBRARY} ;" - " thrift=${THRIFT_LIBRARY} ;") - set(EXTERNAL_PARQUET_FOUND 0) - endif() -endif() - -if(NOT EXTERNAL_PARQUET_FOUND AND NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD) - if(SNAPPY_LIBRARY) - set(CAN_USE_INTERNAL_PARQUET_LIBRARY 1) - else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library without snappy") - endif() - - include(CheckCXXSourceCompiles) - if(NOT USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY) - set(CMAKE_REQUIRED_LIBRARIES ${DOUBLE_CONVERSION_LIBRARIES}) - set(CMAKE_REQUIRED_INCLUDES ${DOUBLE_CONVERSION_INCLUDE_DIR}) - check_cxx_source_compiles(" - #include - int main() { static const int flags_ = double_conversion::StringToDoubleConverter::ALLOW_CASE_INSENSIBILITY; return 0;} - " HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY) - - if(NOT HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY) # HAVE_STD_RANDOM_SHUFFLE - message (${RECONFIGURE_MESSAGE_LEVEL} "Disabling internal parquet library because arrow is broken (can't use old double_conversion)") - set(CAN_USE_INTERNAL_PARQUET_LIBRARY 0) - endif() - endif() - - if(NOT CAN_USE_INTERNAL_PARQUET_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet") - set(USE_INTERNAL_PARQUET_LIBRARY 0) - else() - set(USE_INTERNAL_PARQUET_LIBRARY 1) - - if(MAKE_STATIC_LIBRARIES) - set(FLATBUFFERS_LIBRARY flatbuffers) - set(ARROW_LIBRARY arrow_static) - set(PARQUET_LIBRARY parquet_static) - else() - set(FLATBUFFERS_LIBRARY flatbuffers_shared) - set(ARROW_LIBRARY arrow_shared) - set(PARQUET_LIBRARY parquet_shared) - endif() - - set(USE_PARQUET 1) - set(USE_ORC 1) - set(USE_ARROW 1) - endif() -elseif(OS_FREEBSD) - message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet library on FreeBSD is not supported") -endif() - -if(USE_PARQUET) - message(STATUS "Using Parquet: arrow=${ARROW_LIBRARY}:${ARROW_INCLUDE_DIR} ;" - " parquet=${PARQUET_LIBRARY}:${PARQUET_INCLUDE_DIR} ;" - " thrift=${THRIFT_LIBRARY} ;" - " flatbuffers=${FLATBUFFERS_LIBRARY}") -else() - message(STATUS "Building without Parquet support") -endif() diff --git a/cmake/find/pdqsort.cmake b/cmake/find/pdqsort.cmake deleted file mode 100644 index 51461044cf9..00000000000 --- a/cmake/find/pdqsort.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set(PDQSORT_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/pdqsort) -message(STATUS "Using pdqsort: ${PDQSORT_INCLUDE_DIR}") diff --git a/cmake/find/poco.cmake b/cmake/find/poco.cmake deleted file mode 100644 index 99233728152..00000000000 --- a/cmake/find/poco.cmake +++ /dev/null @@ -1,8 +0,0 @@ -option (USE_INTERNAL_POCO_LIBRARY "Use internal Poco library" ON) - -if (NOT USE_INTERNAL_POCO_LIBRARY) - find_path (ROOT_DIR NAMES Foundation/include/Poco/Poco.h include/Poco/Poco.h) - if (NOT ROOT_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system poco") - endif() -endif () diff --git 
a/cmake/find/protobuf.cmake b/cmake/find/protobuf.cmake deleted file mode 100644 index a2ea8ae87fc..00000000000 --- a/cmake/find/protobuf.cmake +++ /dev/null @@ -1,62 +0,0 @@ -option(ENABLE_PROTOBUF "Enable protobuf" ${ENABLE_LIBRARIES}) - -if(NOT ENABLE_PROTOBUF) - if(USE_INTERNAL_PROTOBUF_LIBRARY) - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf with ENABLE_PROTOBUF=OFF") - endif() - return() -endif() - -# Normally we use the internal protobuf library. -# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case. -# The external protobuf library can be installed in the system by running -# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev -option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt") - if(USE_INTERNAL_PROTOBUF_LIBRARY) - message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init") - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf") - set(USE_INTERNAL_PROTOBUF_LIBRARY 0) - endif() - set(MISSING_INTERNAL_PROTOBUF_LIBRARY 1) -endif() - -if(NOT USE_INTERNAL_PROTOBUF_LIBRARY) - find_package(Protobuf) - if(NOT Protobuf_INCLUDE_DIR OR NOT Protobuf_LIBRARY) - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf library") - set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0) - elseif(NOT Protobuf_PROTOC_EXECUTABLE) - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf compiler") - set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0) - else() - set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 1) - set(USE_PROTOBUF 1) - endif() -endif() - -if(NOT EXTERNAL_PROTOBUF_LIBRARY_FOUND AND NOT MISSING_INTERNAL_PROTOBUF_LIBRARY) - set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src") - set(Protobuf_LIBRARY libprotobuf) - set(Protobuf_PROTOC_EXECUTABLE "$") - set(Protobuf_PROTOC_LIBRARY libprotoc) - - include("${ClickHouse_SOURCE_DIR}/contrib/protobuf-cmake/protobuf_generate.cmake") - - set(USE_INTERNAL_PROTOBUF_LIBRARY 1) - set(USE_PROTOBUF 1) -endif() - -if(OS_FREEBSD AND SANITIZE STREQUAL "address") - # ../contrib/protobuf/src/google/protobuf/arena_impl.h:45:10: fatal error: 'sanitizer/asan_interface.h' file not found - # #include - if(LLVM_INCLUDE_DIRS) - set(Protobuf_INCLUDE_DIR "${Protobuf_INCLUDE_DIR}" ${LLVM_INCLUDE_DIRS}) - else() - message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use protobuf on FreeBSD with address sanitizer without LLVM") - set(USE_PROTOBUF 0) - endif() -endif() - -message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE} : ${Protobuf_PROTOC_LIBRARY}") diff --git a/cmake/find/rapidjson.cmake b/cmake/find/rapidjson.cmake deleted file mode 100644 index cdf6761446e..00000000000 --- a/cmake/find/rapidjson.cmake +++ /dev/null @@ -1,35 +0,0 @@ -option(ENABLE_RAPIDJSON "Use rapidjson" ${ENABLE_LIBRARIES}) -if(NOT ENABLE_RAPIDJSON) - if(USE_INTERNAL_RAPIDJSON_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal rapidjson library with ENABLE_RAPIDJSON=OFF") - endif() - return() -endif() - -option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h") - if(USE_INTERNAL_RAPIDJSON_LIBRARY) - message(WARNING 
"submodule contrib/rapidjson is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rapidjson library") - set(USE_INTERNAL_RAPIDJSON_LIBRARY 0) - endif() - set(MISSING_INTERNAL_RAPIDJSON_LIBRARY 1) -endif() - -if(NOT USE_INTERNAL_RAPIDJSON_LIBRARY) - find_path(RAPIDJSON_INCLUDE_DIR NAMES rapidjson/rapidjson.h PATHS ${RAPIDJSON_INCLUDE_PATHS}) - if(NOT RAPIDJSON_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system rapidjson") - endif() -endif() - -if(RAPIDJSON_INCLUDE_DIR) - set(USE_RAPIDJSON 1) -elseif(NOT MISSING_INTERNAL_RAPIDJSON_LIBRARY) - set(RAPIDJSON_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include") - set(USE_INTERNAL_RAPIDJSON_LIBRARY 1) - set(USE_RAPIDJSON 1) -endif() - -message(STATUS "Using rapidjson=${USE_RAPIDJSON}: ${RAPIDJSON_INCLUDE_DIR}") diff --git a/cmake/find/rdkafka.cmake b/cmake/find/rdkafka.cmake deleted file mode 100644 index cad267bacff..00000000000 --- a/cmake/find/rdkafka.cmake +++ /dev/null @@ -1,68 +0,0 @@ -option (ENABLE_RDKAFKA "Enable kafka" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_RDKAFKA) - if (USE_INTERNAL_RDKAFKA_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ENABLE_RDKAFKA=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/src") - if(USE_INTERNAL_RDKAFKA_LIBRARY) - message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka") - set (USE_INTERNAL_RDKAFKA_LIBRARY 0) - endif() - set (MISSING_INTERNAL_CPPKAFKA_LIBRARY 1) -endif () - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src") - if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY) - message (WARNING "submodule contrib/librdkafka is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka") - set (USE_INTERNAL_RDKAFKA_LIBRARY 0) - endif() - set (MISSING_INTERNAL_RDKAFKA_LIBRARY 1) -endif () - -if (NOT USE_INTERNAL_RDKAFKA_LIBRARY) - find_library (RDKAFKA_LIB rdkafka) - find_path (RDKAFKA_INCLUDE_DIR NAMES librdkafka/rdkafka.h PATHS ${RDKAFKA_INCLUDE_PATHS}) - if (NOT RDKAFKA_LIB OR NOT RDKAFKA_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system librdkafka") - endif() - - if (USE_STATIC_LIBRARIES AND NOT OS_FREEBSD) - find_library (SASL2_LIBRARY sasl2) - if (NOT SASL2_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sasl2 library needed for static librdkafka") - endif() - endif () - set (CPPKAFKA_LIBRARY cppkafka) -endif () - -if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR) - set (USE_RDKAFKA 1) - add_library (rdkafka_imp UNKNOWN IMPORTED) - set_target_properties (rdkafka_imp PROPERTIES IMPORTED_LOCATION ${RDKAFKA_LIB}) - set_target_properties (rdkafka_imp PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${RDKAFKA_INCLUDE_DIR}) - - set (RDKAFKA_LIBRARY rdkafka_imp ${OPENSSL_LIBRARIES}) - set (CPPKAFKA_LIBRARY cppkafka) - if (SASL2_LIBRARY) - list (APPEND RDKAFKA_LIBRARY ${SASL2_LIBRARY}) - endif () - if (LZ4_LIBRARY) - list (APPEND RDKAFKA_LIBRARY ${LZ4_LIBRARY}) - endif () -elseif (NOT MISSING_INTERNAL_RDKAFKA_LIBRARY AND NOT MISSING_INTERNAL_CPPKAFKA_LIBRARY) - set (USE_INTERNAL_RDKAFKA_LIBRARY 1) - set (RDKAFKA_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src") - set (RDKAFKA_LIBRARY rdkafka) - set (CPPKAFKA_LIBRARY cppkafka) - set (USE_RDKAFKA 1) -endif () - -message (STATUS "Using librdkafka=${USE_RDKAFKA}: ${RDKAFKA_INCLUDE_DIR} : ${RDKAFKA_LIBRARY} ${CPPKAFKA_LIBRARY}") diff --git a/cmake/find/re2.cmake b/cmake/find/re2.cmake deleted file mode 100644 index ed5c72d13fa..00000000000 --- a/cmake/find/re2.cmake +++ /dev/null @@ -1,41 +0,0 @@ -option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/re2") - if(USE_INTERNAL_RE2_LIBRARY) - message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library") - endif() - set(USE_INTERNAL_RE2_LIBRARY 0) - set(MISSING_INTERNAL_RE2_LIBRARY 1) -endif() - -if (NOT USE_INTERNAL_RE2_LIBRARY) - find_library (RE2_LIBRARY re2) - find_path (RE2_INCLUDE_DIR NAMES re2/re2.h PATHS ${RE2_INCLUDE_PATHS}) - if (NOT RE2_LIBRARY OR NOT RE2_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system re2 library") - endif () -endif () - -string(FIND ${CMAKE_CURRENT_BINARY_DIR} " " _have_space) -if(_have_space GREATER 0) - message(WARNING "Using spaces in build path [${CMAKE_CURRENT_BINARY_DIR}] highly not recommended. 
Library re2st will be disabled.") - set (MISSING_INTERNAL_RE2_ST_LIBRARY 1) -endif() - -if (RE2_LIBRARY AND RE2_INCLUDE_DIR) - set (RE2_ST_LIBRARY ${RE2_LIBRARY}) -elseif (NOT MISSING_INTERNAL_RE2_LIBRARY) - set (USE_INTERNAL_RE2_LIBRARY 1) - set (RE2_LIBRARY re2) - set (RE2_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2") - if (NOT MISSING_INTERNAL_RE2_ST_LIBRARY) - set (RE2_ST_LIBRARY re2_st) - set (USE_RE2_ST 1) - else () - set (RE2_ST_LIBRARY ${RE2_LIBRARY}) - message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal re2 library instead of re2_st") - endif () -endif () - -message (STATUS "Using re2: ${RE2_INCLUDE_DIR} : ${RE2_LIBRARY}; ${RE2_ST_INCLUDE_DIR} : ${RE2_ST_LIBRARY}") diff --git a/cmake/find/rocksdb.cmake b/cmake/find/rocksdb.cmake deleted file mode 100644 index 10592d1d037..00000000000 --- a/cmake/find/rocksdb.cmake +++ /dev/null @@ -1,71 +0,0 @@ -if (OS_DARWIN AND ARCH_AARCH64) - set (ENABLE_ROCKSDB OFF CACHE INTERNAL "") -endif() - -option(ENABLE_ROCKSDB "Enable ROCKSDB" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_ROCKSDB) - if (USE_INTERNAL_ROCKSDB_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal rocksdb library with ENABLE_ROCKSDB=OFF") - endif() - return() -endif() - -option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ON) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include") - if (USE_INTERNAL_ROCKSDB_LIBRARY) - message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init") - message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb") - endif() - set (MISSING_INTERNAL_ROCKSDB 1) -endif () - -if (NOT USE_INTERNAL_ROCKSDB_LIBRARY) - find_library (ROCKSDB_LIBRARY rocksdb) - find_path (ROCKSDB_INCLUDE_DIR NAMES rocksdb/db.h PATHS ${ROCKSDB_INCLUDE_PATHS}) - if (NOT ROCKSDB_LIBRARY OR NOT ROCKSDB_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system rocksdb library") - endif() - - if (NOT SNAPPY_LIBRARY) - include(cmake/find/snappy.cmake) - endif() - if (NOT ZLIB_LIBRARY) - include(cmake/find/zlib.cmake) - endif() - - find_package(BZip2) - find_library(ZSTD_LIBRARY zstd) - find_library(LZ4_LIBRARY lz4) - find_library(GFLAGS_LIBRARY gflags) - - if(SNAPPY_LIBRARY AND ZLIB_LIBRARY AND LZ4_LIBRARY AND BZIP2_FOUND AND ZSTD_LIBRARY AND GFLAGS_LIBRARY) - list (APPEND ROCKSDB_LIBRARY ${SNAPPY_LIBRARY}) - list (APPEND ROCKSDB_LIBRARY ${ZLIB_LIBRARY}) - list (APPEND ROCKSDB_LIBRARY ${LZ4_LIBRARY}) - list (APPEND ROCKSDB_LIBRARY ${BZIP2_LIBRARY}) - list (APPEND ROCKSDB_LIBRARY ${ZSTD_LIBRARY}) - list (APPEND ROCKSDB_LIBRARY ${GFLAGS_LIBRARY}) - else() - message (${RECONFIGURE_MESSAGE_LEVEL} - "Can't find system rocksdb: snappy=${SNAPPY_LIBRARY} ;" - " zlib=${ZLIB_LIBRARY} ;" - " lz4=${LZ4_LIBRARY} ;" - " bz2=${BZIP2_LIBRARY} ;" - " zstd=${ZSTD_LIBRARY} ;" - " gflags=${GFLAGS_LIBRARY} ;") - endif() -endif () - -if(ROCKSDB_LIBRARY AND ROCKSDB_INCLUDE_DIR) - set(USE_ROCKSDB 1) -elseif (NOT MISSING_INTERNAL_ROCKSDB) - set (USE_INTERNAL_ROCKSDB_LIBRARY 1) - - set (ROCKSDB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include") - set (ROCKSDB_LIBRARY "rocksdb") - set (USE_ROCKSDB 1) -endif () - -message (STATUS "Using ROCKSDB=${USE_ROCKSDB}: ${ROCKSDB_INCLUDE_DIR} : ${ROCKSDB_LIBRARY}") diff --git a/cmake/find/s2geometry.cmake b/cmake/find/s2geometry.cmake deleted file mode 100644 index 348805b342e..00000000000 --- a/cmake/find/s2geometry.cmake +++ /dev/null @@ -1,24 +0,0 @@ - -option(ENABLE_S2_GEOMETRY "Enable S2 geometry library" 
${ENABLE_LIBRARIES}) - -if (ENABLE_S2_GEOMETRY) - if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/s2geometry") - message (WARNING "submodule contrib/s2geometry is missing. to fix try run: \n git submodule update --init") - set (ENABLE_S2_GEOMETRY 0) - set (USE_S2_GEOMETRY 0) - else() - if (OPENSSL_FOUND) - set (S2_GEOMETRY_LIBRARY s2) - set (S2_GEOMETRY_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/s2geometry/src/s2) - set (USE_S2_GEOMETRY 1) - else() - message (WARNING "S2 uses OpenSSL, but the latter is absent.") - endif() - endif() - - if (NOT USE_S2_GEOMETRY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable S2 geometry library") - endif() -endif() - -message (STATUS "Using s2geometry=${USE_S2_GEOMETRY} : ${S2_GEOMETRY_INCLUDE_DIR}") diff --git a/cmake/find/s3.cmake b/cmake/find/s3.cmake deleted file mode 100644 index 9a10c3f13ef..00000000000 --- a/cmake/find/s3.cmake +++ /dev/null @@ -1,45 +0,0 @@ -if(NOT OS_FREEBSD) - option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES}) -elseif(ENABLE_S3 OR USE_INTERNAL_AWS_S3_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use S3 on FreeBSD") -endif() - -if(NOT ENABLE_S3) - if(USE_INTERNAL_AWS_S3_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal S3 library with ENABLE_S3=OFF") - endif() - return() -endif() - -option(USE_INTERNAL_AWS_S3_LIBRARY "Set to FALSE to use system S3 instead of bundled (experimental set to OFF on your own risk)" - ON) - -if (NOT USE_INTERNAL_POCO_LIBRARY AND USE_INTERNAL_AWS_S3_LIBRARY) - message (FATAL_ERROR "Currently S3 support can be built only with internal POCO library") -endif() - -if (NOT USE_INTERNAL_AWS_S3_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Compilation with external S3 library is not supported yet") -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3") - message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init") - if (USE_INTERNAL_AWS_S3_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal S3 library") - endif () - set (MISSING_AWS_S3 1) -endif () - -if (USE_INTERNAL_AWS_S3_LIBRARY AND NOT MISSING_AWS_S3) - set(AWS_S3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3/include") - set(AWS_S3_CORE_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core/include") - set(AWS_S3_LIBRARY aws_s3) - set(USE_INTERNAL_AWS_S3_LIBRARY 1) - set(USE_AWS_S3 1) -else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable S3") - set(USE_INTERNAL_AWS_S3_LIBRARY 0) - set(USE_AWS_S3 0) -endif () - -message (STATUS "Using aws_s3=${USE_AWS_S3}: ${AWS_S3_INCLUDE_DIR} : ${AWS_S3_LIBRARY}") diff --git a/cmake/find/sentry.cmake b/cmake/find/sentry.cmake deleted file mode 100644 index e08cbad1729..00000000000 --- a/cmake/find/sentry.cmake +++ /dev/null @@ -1,23 +0,0 @@ -set (SENTRY_LIBRARY "sentry") - -set (SENTRY_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native/include") -if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h") - message (WARNING "submodule contrib/sentry-native is missing. 
to fix try run: \n git submodule update --init") - if (USE_SENTRY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sentry library") - endif() - return() -endif () - -if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG)) - option (USE_SENTRY "Use Sentry" ${ENABLE_LIBRARIES}) - set (SENTRY_TRANSPORT "curl" CACHE STRING "") - set (SENTRY_BACKEND "none" CACHE STRING "") - set (SENTRY_EXPORT_SYMBOLS OFF CACHE BOOL "") - set (SENTRY_LINK_PTHREAD OFF CACHE BOOL "") - set (SENTRY_PIC OFF CACHE BOOL "") - set (BUILD_SHARED_LIBS OFF) - message (STATUS "Using sentry=${USE_SENTRY}: ${SENTRY_LIBRARY}") -elseif (USE_SENTRY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Sentry is not supported in current configuration") -endif () diff --git a/cmake/find/simdjson.cmake b/cmake/find/simdjson.cmake deleted file mode 100644 index bf22a331f04..00000000000 --- a/cmake/find/simdjson.cmake +++ /dev/null @@ -1,11 +0,0 @@ -option (USE_SIMDJSON "Use simdjson" ${ENABLE_LIBRARIES}) - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/simdjson/include/simdjson.h") - message (WARNING "submodule contrib/simdjson is missing. to fix try run: \n git submodule update --init") - if (USE_SIMDJSON) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal simdjson library") - endif() - return() -endif () - -message(STATUS "Using simdjson=${USE_SIMDJSON}") diff --git a/cmake/find/snappy.cmake b/cmake/find/snappy.cmake deleted file mode 100644 index 245b3a9a2ff..00000000000 --- a/cmake/find/snappy.cmake +++ /dev/null @@ -1,21 +0,0 @@ -option(USE_SNAPPY "Enable snappy library" ON) - -if(NOT USE_SNAPPY) - if (USE_INTERNAL_SNAPPY_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal snappy library with USE_SNAPPY=OFF") - endif() - return() -endif() - -option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ON) - -if(NOT USE_INTERNAL_SNAPPY_LIBRARY) - find_library(SNAPPY_LIBRARY snappy) - if (NOT SNAPPY_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system snappy library") - endif() -else () - set(SNAPPY_LIBRARY snappy) -endif() - -message (STATUS "Using snappy: ${SNAPPY_LIBRARY}") diff --git a/cmake/find/sparsehash.cmake b/cmake/find/sparsehash.cmake deleted file mode 100644 index f258f6c1c5b..00000000000 --- a/cmake/find/sparsehash.cmake +++ /dev/null @@ -1,17 +0,0 @@ -option (USE_INTERNAL_SPARSEHASH_LIBRARY "Set to FALSE to use system sparsehash library instead of bundled" - ON) # ON by default as we are not aware of any system providing package for sparsehash-c11 - -if (NOT USE_INTERNAL_SPARSEHASH_LIBRARY) - find_path (SPARSEHASH_INCLUDE_DIR NAMES sparsehash/sparse_hash_map PATHS ${SPARSEHASH_INCLUDE_PATHS}) - if (NOT SPARSEHASH_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sparsehash library") - endif () -endif () - -if (SPARSEHASH_INCLUDE_DIR) -else () - set (USE_INTERNAL_SPARSEHASH_LIBRARY 1) - set (SPARSEHASH_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/sparsehash-c11") -endif () - -message (STATUS "Using sparsehash: ${SPARSEHASH_INCLUDE_DIR}") diff --git a/cmake/find/sqlite.cmake b/cmake/find/sqlite.cmake deleted file mode 100644 index 083a9faea59..00000000000 --- a/cmake/find/sqlite.cmake +++ /dev/null @@ -1,16 +0,0 @@ -option(ENABLE_SQLITE "Enable sqlite" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_SQLITE) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/sqlite-amalgamation/sqlite3.c") - message (WARNING "submodule contrib/sqlite3-amalgamation is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sqlite library") - set (USE_SQLITE 0) - return() -endif() - -set (USE_SQLITE 1) -set (SQLITE_LIBRARY sqlite) -message (STATUS "Using sqlite=${USE_SQLITE}") diff --git a/cmake/find/ssl.cmake b/cmake/find/ssl.cmake deleted file mode 100644 index 1ac6a54ed20..00000000000 --- a/cmake/find/ssl.cmake +++ /dev/null @@ -1,133 +0,0 @@ -# Needed when securely connecting to an external server, e.g. -# clickhouse-client --host ... --secure -option(ENABLE_SSL "Enable ssl" ${ENABLE_LIBRARIES}) - -if(NOT ENABLE_SSL) - if (USE_INTERNAL_SSL_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal ssl library with ENABLE_SSL=OFF") - endif() - return() -endif() - -option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md") - if(USE_INTERNAL_SSL_LIBRARY) - message(WARNING "submodule contrib/boringssl is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ssl library") - endif() - set(USE_INTERNAL_SSL_LIBRARY 0) - set(MISSING_INTERNAL_SSL_LIBRARY 1) -endif() - -set (OPENSSL_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES}) - -if (NOT USE_INTERNAL_SSL_LIBRARY) - if (APPLE) - set (OPENSSL_ROOT_DIR "/usr/local/opt/openssl" CACHE INTERNAL "") - # https://rt.openssl.org/Ticket/Display.html?user=guest&pass=guest&id=2232 - if (USE_STATIC_LIBRARIES) - message(WARNING "Disable USE_STATIC_LIBRARIES if you have linking problems with OpenSSL on MacOS") - endif () - endif () - find_package (OpenSSL) - - if (NOT OPENSSL_FOUND) - # Try to find manually. - set (OPENSSL_INCLUDE_PATHS "/usr/local/opt/openssl/include") - set (OPENSSL_PATHS "/usr/local/opt/openssl/lib") - find_path (OPENSSL_INCLUDE_DIR NAMES openssl/ssl.h PATHS ${OPENSSL_INCLUDE_PATHS}) - find_library (OPENSSL_SSL_LIBRARY ssl PATHS ${OPENSSL_PATHS}) - find_library (OPENSSL_CRYPTO_LIBRARY crypto PATHS ${OPENSSL_PATHS}) - if (OPENSSL_SSL_LIBRARY AND OPENSSL_CRYPTO_LIBRARY AND OPENSSL_INCLUDE_DIR) - set (OPENSSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}) - set (OPENSSL_FOUND 1) - endif () - endif () - - if (NOT OPENSSL_FOUND) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ssl") - endif() -endif () - -if (NOT OPENSSL_FOUND AND NOT MISSING_INTERNAL_SSL_LIBRARY) - set (USE_INTERNAL_SSL_LIBRARY 1) - set (OPENSSL_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/boringssl") - set (OPENSSL_INCLUDE_DIR "${OPENSSL_ROOT_DIR}/include") - set (OPENSSL_CRYPTO_LIBRARY crypto) - set (OPENSSL_SSL_LIBRARY ssl) - set (OPENSSL_FOUND 1) - set (OPENSSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}) -endif () - -if(OPENSSL_FOUND) - # we need keep OPENSSL_FOUND for many libs in contrib - set(USE_SSL 1) -endif() - -# used by new poco -# part from /usr/share/cmake-*/Modules/FindOpenSSL.cmake, with removed all "EXISTS " -if(OPENSSL_FOUND AND NOT USE_INTERNAL_SSL_LIBRARY) - if(NOT TARGET OpenSSL::Crypto AND - (OPENSSL_CRYPTO_LIBRARY OR - LIB_EAY_LIBRARY_DEBUG OR - LIB_EAY_LIBRARY_RELEASE) - ) - add_library(OpenSSL::Crypto UNKNOWN IMPORTED) - set_target_properties(OpenSSL::Crypto PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}") - if(OPENSSL_CRYPTO_LIBRARY) - set_target_properties(OpenSSL::Crypto PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES "C" - IMPORTED_LOCATION "${OPENSSL_CRYPTO_LIBRARY}") - endif() - if(LIB_EAY_LIBRARY_RELEASE) - set_property(TARGET 
OpenSSL::Crypto APPEND PROPERTY - IMPORTED_CONFIGURATIONS RELEASE) - set_target_properties(OpenSSL::Crypto PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C" - IMPORTED_LOCATION_RELEASE "${LIB_EAY_LIBRARY_RELEASE}") - endif() - if(LIB_EAY_LIBRARY_DEBUG) - set_property(TARGET OpenSSL::Crypto APPEND PROPERTY - IMPORTED_CONFIGURATIONS DEBUG) - set_target_properties(OpenSSL::Crypto PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C" - IMPORTED_LOCATION_DEBUG "${LIB_EAY_LIBRARY_DEBUG}") - endif() - endif() - if(NOT TARGET OpenSSL::SSL AND - (OPENSSL_SSL_LIBRARY OR - SSL_EAY_LIBRARY_DEBUG OR - SSL_EAY_LIBRARY_RELEASE) - ) - add_library(OpenSSL::SSL UNKNOWN IMPORTED) - set_target_properties(OpenSSL::SSL PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}") - if(OPENSSL_SSL_LIBRARY) - set_target_properties(OpenSSL::SSL PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES "C" - IMPORTED_LOCATION "${OPENSSL_SSL_LIBRARY}") - endif() - if(SSL_EAY_LIBRARY_RELEASE) - set_property(TARGET OpenSSL::SSL APPEND PROPERTY - IMPORTED_CONFIGURATIONS RELEASE) - set_target_properties(OpenSSL::SSL PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C" - IMPORTED_LOCATION_RELEASE "${SSL_EAY_LIBRARY_RELEASE}") - endif() - if(SSL_EAY_LIBRARY_DEBUG) - set_property(TARGET OpenSSL::SSL APPEND PROPERTY - IMPORTED_CONFIGURATIONS DEBUG) - set_target_properties(OpenSSL::SSL PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C" - IMPORTED_LOCATION_DEBUG "${SSL_EAY_LIBRARY_DEBUG}") - endif() - if(TARGET OpenSSL::Crypto) - set_target_properties(OpenSSL::SSL PROPERTIES - INTERFACE_LINK_LIBRARIES OpenSSL::Crypto) - endif() - endif() -endif() - -message (STATUS "Using ssl=${USE_SSL}: ${OPENSSL_INCLUDE_DIR} : ${OPENSSL_LIBRARIES}") diff --git a/cmake/find/thrift.cmake b/cmake/find/thrift.cmake deleted file mode 100644 index 08eeb60915e..00000000000 --- a/cmake/find/thrift.cmake +++ /dev/null @@ -1,34 +0,0 @@ -option(ENABLE_THRIFT "Enable Thrift" ${ENABLE_LIBRARIES}) - -if (NOT ENABLE_THRIFT) - message (STATUS "thrift disabled") - set(USE_INTERNAL_THRIFT_LIBRARY 0) - return() -endif() - -option(USE_INTERNAL_THRIFT_LIBRARY "Set to FALSE to use system thrift library instead of bundled" ON) -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/thrift") - if (USE_INTERNAL_THRIFT_LIBRARY) - message (WARNING "submodule contrib/thrift is missing. to fix try run: \n git submodule update --init --recursive") - set(USE_INTERNAL_THRIFT_LIBRARY 0) - endif () -endif() - -if (USE_INTERNAL_THRIFT_LIBRARY) - if (MAKE_STATIC_LIBRARIES) - set(THRIFT_LIBRARY thrift_static) - else() - set(THRIFT_LIBRARY thrift) - endif() - set (THRIFT_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/thrift/lib/cpp/src") - set(USE_THRIFT 1) -else() - find_library(THRIFT_LIBRARY thrift) - if (NOT THRIFT_LIBRARY) - set(USE_THRIFT 0) - else() - set(USE_THRIFT 1) - endif() -endif () - -message (STATUS "Using thrift=${USE_THRIFT}: ${THRIFT_INCLUDE_DIR} : ${THRIFT_LIBRARY}") diff --git a/cmake/find/xz.cmake b/cmake/find/xz.cmake deleted file mode 100644 index f25937fe87d..00000000000 --- a/cmake/find/xz.cmake +++ /dev/null @@ -1,27 +0,0 @@ -option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h") - if(USE_INTERNAL_XZ_LIBRARY) - message(WARNING "submodule contrib/xz is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal xz (lzma) library") - set(USE_INTERNAL_XZ_LIBRARY 0) - endif() - set(MISSING_INTERNAL_XZ_LIBRARY 1) -endif() - -if (NOT USE_INTERNAL_XZ_LIBRARY) - find_library (XZ_LIBRARY lzma) - find_path (XZ_INCLUDE_DIR NAMES lzma.h PATHS ${XZ_INCLUDE_PATHS}) - if (NOT XZ_LIBRARY OR NOT XZ_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system xz (lzma) library") - endif () -endif () - -if (XZ_LIBRARY AND XZ_INCLUDE_DIR) -elseif (NOT MISSING_INTERNAL_XZ_LIBRARY) - set (USE_INTERNAL_XZ_LIBRARY 1) - set (XZ_LIBRARY liblzma) - set (XZ_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api) -endif () - -message (STATUS "Using xz (lzma): ${XZ_INCLUDE_DIR} : ${XZ_LIBRARY}") diff --git a/cmake/find/yaml-cpp.cmake b/cmake/find/yaml-cpp.cmake deleted file mode 100644 index 2aba6808e31..00000000000 --- a/cmake/find/yaml-cpp.cmake +++ /dev/null @@ -1,9 +0,0 @@ -option(USE_YAML_CPP "Enable yaml-cpp" ${ENABLE_LIBRARIES}) - -if (NOT USE_YAML_CPP) - return() -endif() - -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/yaml-cpp/README.md") - message (ERROR "submodule contrib/yaml-cpp is missing. to fix try run: \n git submodule update --init") -endif() diff --git a/cmake/find/zlib.cmake b/cmake/find/zlib.cmake deleted file mode 100644 index c2ee8217afa..00000000000 --- a/cmake/find/zlib.cmake +++ /dev/null @@ -1,42 +0,0 @@ -option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ON) - -if (NOT MSVC) - set (INTERNAL_ZLIB_NAME "zlib-ng" CACHE INTERNAL "") -else () - set (INTERNAL_ZLIB_NAME "zlib" CACHE INTERNAL "") - if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}") - message (WARNING "Will use standard zlib, please clone manually:\n git clone https://github.com/madler/zlib.git ${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal zlib library") - endif () -endif () - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}/zlib.h") - if(USE_INTERNAL_ZLIB_LIBRARY) - message(WARNING "submodule contrib/${INTERNAL_ZLIB_NAME} is missing. 
to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zlib library") - endif() - set(USE_INTERNAL_ZLIB_LIBRARY 0) - set(MISSING_INTERNAL_ZLIB_LIBRARY 1) -endif() - -if (NOT USE_INTERNAL_ZLIB_LIBRARY) - find_package (ZLIB) - if (NOT ZLIB_FOUND) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system zlib library") - else() - set (ZLIB_NAME "libz") - endif() -endif () - -if (NOT ZLIB_FOUND AND NOT MISSING_INTERNAL_ZLIB_LIBRARY) - set (USE_INTERNAL_ZLIB_LIBRARY 1) - set (ZLIB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}" "${ClickHouse_BINARY_DIR}/contrib/${INTERNAL_ZLIB_NAME}" CACHE INTERNAL "") # generated zconf.h - set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR}) # for poco - set (ZLIB_INCLUDE_DIRECTORIES ${ZLIB_INCLUDE_DIR}) # for protobuf - set (ZLIB_FOUND 1) # for poco - set (ZLIB_LIBRARIES zlib CACHE INTERNAL "") - set (ZLIB_LIBRARY_NAME ${ZLIB_LIBRARIES}) # for cassandra - set (ZLIB_NAME "${INTERNAL_ZLIB_NAME}") -endif () - -message (STATUS "Using ${ZLIB_NAME}: ${ZLIB_INCLUDE_DIR} : ${ZLIB_LIBRARIES}") diff --git a/cmake/find/zstd.cmake b/cmake/find/zstd.cmake deleted file mode 100644 index 2b8dd53fbc3..00000000000 --- a/cmake/find/zstd.cmake +++ /dev/null @@ -1,27 +0,0 @@ -option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ON) - -if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h") - if(USE_INTERNAL_ZSTD_LIBRARY) - message(WARNING "submodule contrib/zstd is missing. to fix try run: \n git submodule update --init") - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zstd library") - set(USE_INTERNAL_ZSTD_LIBRARY 0) - endif() - set(MISSING_INTERNAL_ZSTD_LIBRARY 1) -endif() - -if (NOT USE_INTERNAL_ZSTD_LIBRARY) - find_library (ZSTD_LIBRARY zstd) - find_path (ZSTD_INCLUDE_DIR NAMES zstd.h PATHS ${ZSTD_INCLUDE_PATHS}) - if (NOT ZSTD_LIBRARY OR NOT ZSTD_INCLUDE_DIR) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system zstd library") - endif () -endif () - -if (ZSTD_LIBRARY AND ZSTD_INCLUDE_DIR) -elseif (NOT MISSING_INTERNAL_ZSTD_LIBRARY) - set (USE_INTERNAL_ZSTD_LIBRARY 1) - set (ZSTD_LIBRARY zstd) - set (ZSTD_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/zstd/lib) -endif () - -message (STATUS "Using zstd: ${ZSTD_INCLUDE_DIR} : ${ZSTD_LIBRARY}") diff --git a/cmake/print_include_directories.cmake b/cmake/print_include_directories.cmake deleted file mode 100644 index cc2098cb397..00000000000 --- a/cmake/print_include_directories.cmake +++ /dev/null @@ -1,29 +0,0 @@ - -# TODO? Maybe recursive collect on all deps - -get_property (dirs1 TARGET dbms PROPERTY INCLUDE_DIRECTORIES) -list(APPEND dirs ${dirs1}) - -get_property (dirs1 TARGET clickhouse_common_io PROPERTY INCLUDE_DIRECTORIES) -list(APPEND dirs ${dirs1}) - -get_property (dirs1 TARGET common PROPERTY INCLUDE_DIRECTORIES) -list(APPEND dirs ${dirs1}) - -get_property (dirs1 TARGET cityhash PROPERTY INCLUDE_DIRECTORIES) -list(APPEND dirs ${dirs1}) - -get_property (dirs1 TARGET roaring PROPERTY INCLUDE_DIRECTORIES) -list(APPEND dirs ${dirs1}) - -if (TARGET double-conversion) - get_property (dirs1 TARGET double-conversion PROPERTY INCLUDE_DIRECTORIES) - list(APPEND dirs ${dirs1}) -endif () - -list(REMOVE_DUPLICATES dirs) -file (WRITE ${CMAKE_CURRENT_BINARY_DIR}/include_directories.txt "") -foreach (dir ${dirs}) - string (REPLACE "${ClickHouse_SOURCE_DIR}" "." 
dir "${dir}") - file (APPEND ${CMAKE_CURRENT_BINARY_DIR}/include_directories.txt "-I ${dir} ") -endforeach () diff --git a/cmake/sanitize.cmake b/cmake/sanitize.cmake index f052948e731..73610545009 100644 --- a/cmake/sanitize.cmake +++ b/cmake/sanitize.cmake @@ -23,7 +23,7 @@ if (SANITIZE) if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${ASAN_FLAGS}") endif() - if (MAKE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan") endif () if (COMPILER_GCC) @@ -48,7 +48,7 @@ if (SANITIZE) if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=memory") endif() - if (MAKE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libmsan") endif () @@ -69,7 +69,7 @@ if (SANITIZE) if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=thread") endif() - if (MAKE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libtsan") endif () if (COMPILER_GCC) @@ -101,7 +101,7 @@ if (SANITIZE) if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined") endif() - if (MAKE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libubsan") endif () if (COMPILER_GCC) diff --git a/cmake/target.cmake b/cmake/target.cmake index 4b109d165e7..ff216f86618 100644 --- a/cmake/target.cmake +++ b/cmake/target.cmake @@ -27,10 +27,10 @@ if (CMAKE_CROSSCOMPILING) if (ARCH_AARCH64) # FIXME: broken dependencies set (ENABLE_GRPC OFF CACHE INTERNAL "") - set (USE_SENTRY OFF CACHE INTERNAL "") + set (ENABLE_SENTRY OFF CACHE INTERNAL "") elseif (ARCH_PPC64LE) set (ENABLE_GRPC OFF CACHE INTERNAL "") - set (USE_SENTRY OFF CACHE INTERNAL "") + set (ENABLE_SENTRY OFF CACHE INTERNAL "") endif () elseif (OS_FREEBSD) # FIXME: broken dependencies @@ -43,7 +43,7 @@ if (CMAKE_CROSSCOMPILING) endif () if (USE_MUSL) - set (USE_SENTRY OFF CACHE INTERNAL "") + set (ENABLE_SENTRY OFF CACHE INTERNAL "") set (ENABLE_ODBC OFF CACHE INTERNAL "") set (ENABLE_GRPC OFF CACHE INTERNAL "") set (ENABLE_HDFS OFF CACHE INTERNAL "") diff --git a/cmake/warnings.cmake b/cmake/warnings.cmake index ecc31529dc4..2093d3dcc87 100644 --- a/cmake/warnings.cmake +++ b/cmake/warnings.cmake @@ -55,11 +55,6 @@ if (COMPILER_CLANG) no_warning(weak-template-vtables) no_warning(weak-vtables) - # XXX: libstdc++ has some of these for 3way compare - if (NOT USE_LIBCXX) - no_warning(zero-as-null-pointer-constant) - endif() - # TODO Enable conversion, sign-conversion, double-promotion warnings. 
else () add_warning(comma) @@ -98,10 +93,7 @@ if (COMPILER_CLANG) add_warning(tautological-bitwise-compare) # XXX: libstdc++ has some of these for 3way compare - if (USE_LIBCXX) - add_warning(zero-as-null-pointer-constant) - endif() - + add_warning(zero-as-null-pointer-constant) endif () elseif (COMPILER_GCC) # Add compiler options only to c++ compiler @@ -183,11 +175,8 @@ elseif (COMPILER_GCC) add_cxx_compile_options(-Wundef) # Warn if vector operation is not implemented via SIMD capabilities of the architecture add_cxx_compile_options(-Wvector-operation-performance) - # XXX: libstdc++ has some of these for 3way compare - if (USE_LIBCXX) - # Warn when a literal 0 is used as null pointer constant. - add_cxx_compile_options(-Wzero-as-null-pointer-constant) - endif() + # Warn when a literal 0 is used as null pointer constant. + add_cxx_compile_options(-Wzero-as-null-pointer-constant) if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 10) # XXX: gcc10 stuck with this option while compiling GatherUtils code diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt index 0e7fecd5748..6172f231b6e 100644 --- a/contrib/CMakeLists.txt +++ b/contrib/CMakeLists.txt @@ -1,4 +1,4 @@ -# Third-party libraries may have substandard code. +#"${folder}/CMakeLists.txt" Third-party libraries may have substandard code. set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w") @@ -20,292 +20,132 @@ if (SANITIZE STREQUAL "undefined") endif() set_property(DIRECTORY PROPERTY EXCLUDE_FROM_ALL 1) -add_subdirectory (abseil-cpp-cmake) -add_subdirectory (magic-enum-cmake) -add_subdirectory (boost-cmake) -add_subdirectory (cctz-cmake) -add_subdirectory (consistent-hashing) -add_subdirectory (dragonbox-cmake) -add_subdirectory (hyperscan-cmake) -add_subdirectory (jemalloc-cmake) -add_subdirectory (libcpuid-cmake) -add_subdirectory (libdivide) -add_subdirectory (libmetrohash) -add_subdirectory (lz4-cmake) -add_subdirectory (murmurhash) -add_subdirectory (replxx-cmake) -add_subdirectory (unixodbc-cmake) -add_subdirectory (nanodbc-cmake) -if (USE_INTERNAL_CAPNP_LIBRARY AND NOT MISSING_INTERNAL_CAPNP_LIBRARY) - add_subdirectory(capnproto-cmake) -endif () +# add_contrib cmake_folder[ base_folder1[, ...base_folderN]] +function(add_contrib cmake_folder) + if (ARGN) + set(base_folders ${ARGN}) + else() + set(base_folders ${cmake_folder}) + endif() + + foreach (base_folder ${base_folders}) + # some typos in the code + if (NOT IS_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/${base_folder}") + message(FATAL_ERROR "No such base folder '${base_folder}' (for '${cmake_folder}' cmake folder). Typo in the base folder name?") + endif() + + file(GLOB contrib_files "${base_folder}/*") + if (NOT contrib_files) + message(STATUS "submodule ${base_folder} is missing or empty. 
to fix try run:") + message(STATUS " git submodule update --init") + return() + endif() + endforeach() + + message(STATUS "Adding contrib module ${base_folders} (configuring with ${cmake_folder})") + add_subdirectory (${cmake_folder}) +endfunction() + +add_contrib (miniselect-cmake miniselect) +add_contrib (pdqsort-cmake pdqsort) +add_contrib (sparsehash-c11-cmake sparsehash-c11) +add_contrib (abseil-cpp-cmake abseil-cpp) +add_contrib (magic-enum-cmake magic_enum) +add_contrib (boost-cmake boost) +add_contrib (cctz-cmake cctz) +add_contrib (consistent-hashing) +add_contrib (dragonbox-cmake dragonbox) +add_contrib (hyperscan-cmake hyperscan) +add_contrib (jemalloc-cmake jemalloc) +add_contrib (libcpuid-cmake libcpuid) +add_contrib (libdivide) +add_contrib (libmetrohash) +add_contrib (lz4-cmake lz4) +add_contrib (murmurhash) +add_contrib (replxx-cmake replxx) +add_contrib (unixodbc-cmake unixodbc) +add_contrib (nanodbc-cmake nanodbc) +add_contrib (capnproto-cmake capnproto) +add_contrib (yaml-cpp-cmake yaml-cpp) +add_contrib (re2-cmake re2) +add_contrib (xz-cmake xz) +add_contrib (brotli-cmake brotli) +add_contrib (double-conversion-cmake double-conversion) +add_contrib (boringssl-cmake boringssl) +add_contrib (poco-cmake poco) +add_contrib (croaring-cmake croaring) +add_contrib (zstd-cmake zstd) +add_contrib (zlib-ng-cmake zlib-ng) +add_contrib (bzip2-cmake bzip2) +add_contrib (snappy-cmake snappy) +add_contrib (rocksdb-cmake rocksdb) +add_contrib (thrift-cmake thrift) +# parquet/arrow/orc +add_contrib (arrow-cmake arrow) # requires: snappy, thrift, double-conversion +add_contrib (avro-cmake avro) # requires: snappy +add_contrib (protobuf-cmake protobuf) +add_contrib (openldap-cmake openldap) +add_contrib (grpc-cmake grpc) +add_contrib (msgpack-c-cmake msgpack-c) if (ENABLE_FUZZING) - add_subdirectory (libprotobuf-mutator-cmake) + add_contrib (libprotobuf-mutator-cmake libprotobuf-mutator) endif() -if (USE_YAML_CPP) - add_subdirectory (yaml-cpp-cmake) +add_contrib (cityhash102) +add_contrib (libfarmhash) +add_contrib (icu-cmake icu) +add_contrib (h3-cmake h3) +add_contrib (mariadb-connector-c-cmake mariadb-connector-c) + +if (ENABLE_TESTS) + add_contrib (googletest-cmake googletest) endif() -if (USE_INTERNAL_XZ_LIBRARY) - add_subdirectory (xz-cmake) +add_contrib (llvm-cmake llvm) +add_contrib (libxml2-cmake libxml2) +add_contrib (aws-s3-cmake + aws + aws-c-common + aws-c-event-stream + aws-checksums +) +add_contrib (base64-cmake base64) +add_contrib (simdjson-cmake simdjson) +add_contrib (rapidjson-cmake rapidjson) +add_contrib (fastops-cmake fastops) +add_contrib (libuv-cmake libuv) +add_contrib (amqpcpp-cmake AMQP-CPP) # requires: libuv +add_contrib (cassandra-cmake cassandra) # requires: libuv +add_contrib (curl-cmake curl) +add_contrib (azure-cmake azure) +add_contrib (sentry-native-cmake sentry-native) # requires: curl +add_contrib (fmtlib-cmake fmtlib) +add_contrib (krb5-cmake krb5) +add_contrib (cyrus-sasl-cmake cyrus-sasl) # for krb5 +add_contrib (libgsasl-cmake libgsasl) # requires krb5 +add_contrib (librdkafka-cmake librdkafka) # requires: libgsasl +add_contrib (libhdfs3-cmake libhdfs3) # requires: protobuf, krb5 +add_contrib (hive-metastore-cmake hive-metastore) # requires: thrift/avro/arrow/libhdfs3 +add_contrib (cppkafka-cmake cppkafka) +add_contrib (libpqxx-cmake libpqxx) +add_contrib (libpq-cmake libpq) +add_contrib (nuraft-cmake NuRaft) +add_contrib (fast_float-cmake fast_float) +add_contrib (datasketches-cpp-cmake datasketches-cpp) + +option(ENABLE_NLP "Enable NLP functions 
support" ${ENABLE_LIBRARIES}) +if (ENABLE_NLP) + add_contrib (libstemmer-c-cmake libstemmer_c) + add_contrib (wordnet-blast-cmake wordnet-blast) + add_contrib (lemmagen-c-cmake lemmagen-c) + add_contrib (nlp-data-cmake nlp-data) + add_contrib (cld2-cmake cld2) endif() -add_subdirectory (poco-cmake) -add_subdirectory (croaring-cmake) - -# TODO: refactor the contrib libraries below this comment. - -if (USE_INTERNAL_ZSTD_LIBRARY) - add_subdirectory (zstd-cmake) -endif () - -if (USE_INTERNAL_RE2_LIBRARY) - add_subdirectory (re2-cmake) -endif () - -if (USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY) - add_subdirectory (double-conversion-cmake) -endif () - -if (USE_INTERNAL_CITYHASH_LIBRARY) - add_subdirectory (cityhash102) -endif () - -if (USE_INTERNAL_FARMHASH_LIBRARY) - add_subdirectory (libfarmhash) -endif () - -if (USE_INTERNAL_ZLIB_LIBRARY) - if (INTERNAL_ZLIB_NAME STREQUAL "zlib-ng") - add_subdirectory (zlib-ng-cmake) - else () - add_subdirectory (${INTERNAL_ZLIB_NAME}) - endif () -endif () - -if (USE_INTERNAL_H3_LIBRARY) - add_subdirectory(h3-cmake) -endif () - -if (USE_INTERNAL_SSL_LIBRARY) - add_subdirectory (boringssl-cmake) - - add_library(OpenSSL::Crypto ALIAS crypto) - add_library(OpenSSL::SSL ALIAS ssl) -endif () - -if (USE_INTERNAL_LDAP_LIBRARY) - add_subdirectory (openldap-cmake) -endif () - -if (USE_INTERNAL_MYSQL_LIBRARY) - add_subdirectory (mariadb-connector-c-cmake) -endif () - -if (USE_INTERNAL_RDKAFKA_LIBRARY) - add_subdirectory (librdkafka-cmake) - target_include_directories(rdkafka BEFORE PRIVATE ${ZLIB_INCLUDE_DIR}) - if(OPENSSL_INCLUDE_DIR) - target_include_directories(rdkafka BEFORE PRIVATE ${OPENSSL_INCLUDE_DIR}) - endif() -endif () - -if (USE_RDKAFKA) - add_subdirectory (cppkafka-cmake) -endif() - -if (ENABLE_ICU AND USE_INTERNAL_ICU_LIBRARY) - add_subdirectory (icu-cmake) -endif () - -if(USE_INTERNAL_SNAPPY_LIBRARY) - set(SNAPPY_BUILD_TESTS 0 CACHE INTERNAL "") - - add_subdirectory(snappy-cmake) - - set (SNAPPY_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/snappy") -endif() - -if (USE_INTERNAL_PARQUET_LIBRARY) - # We dont use arrow's cmakefiles because they uses too many depends and download some libs in compile time - # But you can update auto-generated parquet files manually: - # cd {BUILD_DIR}/contrib/arrow/cpp/src/parquet && mkdir -p build && cd build - # cmake .. -DARROW_COMPUTE=ON -DARROW_PARQUET=ON -DARROW_SIMD_LEVEL=NONE -DARROW_VERBOSE_THIRDPARTY_BUILD=ON - # -DARROW_BUILD_SHARED=1 -DARROW_BUILD_UTILITIES=OFF -DARROW_BUILD_INTEGRATION=OFF - # -DBoost_FOUND=1 -DARROW_TEST_LINKAGE="shared" - # make -j8 - # copy {BUILD_DIR}/contrib/arrow/cpp/src/parquet/*.cpp,*.h -> {BUILD_DIR}/contrib/arrow-cmake/cpp/src/parquet/ - - # Also useful parquet reader: - # cd {BUILD_DIR}/contrib/arrow/cpp && mkdir -p build && cd build - # cmake .. -DARROW_PARQUET=1 -DARROW_WITH_SNAPPY=1 -DPARQUET_BUILD_EXECUTABLES=1 - # make -j8 - # {BUILD_DIR}/contrib/arrow/cpp/build/release/parquet-reader some_file.parquet - - add_subdirectory(arrow-cmake) - - # The library is large - avoid bloat. 
- target_compile_options (${ARROW_LIBRARY} PRIVATE -g0) - target_compile_options (${PARQUET_LIBRARY} PRIVATE -g0) -endif() - -if (USE_INTERNAL_AVRO_LIBRARY) - add_subdirectory(avro-cmake) -endif() - -if(USE_INTERNAL_GTEST_LIBRARY) - add_subdirectory(googletest-cmake) -elseif(GTEST_SRC_DIR) - add_subdirectory(${GTEST_SRC_DIR}/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest) - target_compile_definitions(gtest INTERFACE GTEST_HAS_POSIX_RE=0) -endif() - -function(add_llvm) - # ld: unknown option: --color-diagnostics - if (APPLE) - set (LINKER_SUPPORTS_COLOR_DIAGNOSTICS 0 CACHE INTERNAL "") - endif () - - # Do not adjust RPATH in llvm, since then it will not be able to find libcxx/libcxxabi/libunwind - set (CMAKE_INSTALL_RPATH "ON") - set (LLVM_COMPILER_CHECKED 1 CACHE INTERNAL "") - set (LLVM_ENABLE_EH 1 CACHE INTERNAL "") - set (LLVM_ENABLE_RTTI 1 CACHE INTERNAL "") - set (LLVM_ENABLE_PIC 0 CACHE INTERNAL "") - set (LLVM_TARGETS_TO_BUILD "X86;AArch64" CACHE STRING "") - - # Need to use C++17 since the compilation is not possible with C++20 currently, due to ambiguous operator != etc. - # LLVM project will set its default value for the -std=... but our global setting from CMake will override it. - set (CMAKE_CXX_STANDARD 17) - - add_subdirectory (llvm/llvm) -endfunction() -if (USE_EMBEDDED_COMPILER) - add_llvm() -endif () - -if (USE_INTERNAL_LIBGSASL_LIBRARY) - add_subdirectory(libgsasl-cmake) -endif() - -if (USE_INTERNAL_LIBXML2_LIBRARY) - add_subdirectory(libxml2-cmake) -endif () - -if (USE_INTERNAL_BROTLI_LIBRARY) - add_subdirectory(brotli-cmake) - target_compile_definitions(brotli PRIVATE BROTLI_BUILD_PORTABLE=1) -endif () - -if (USE_INTERNAL_PROTOBUF_LIBRARY) - add_subdirectory(protobuf-cmake) -endif () - -if (USE_INTERNAL_THRIFT_LIBRARY) - add_subdirectory(thrift-cmake) -endif () - -if (USE_INTERNAL_HDFS3_LIBRARY) - add_subdirectory(libhdfs3-cmake) -endif () - -if (USE_INTERNAL_GRPC_LIBRARY) - add_subdirectory(grpc-cmake) -endif () - -if (USE_INTERNAL_AWS_S3_LIBRARY) - add_subdirectory(aws-s3-cmake) - - # The library is large - avoid bloat. 
- target_compile_options (aws_s3 PRIVATE -g0) - target_compile_options (aws_s3_checksums PRIVATE -g0) - -endif () - -if (USE_BASE64) - add_subdirectory (base64-cmake) -endif() - -if (USE_SIMDJSON) - add_subdirectory (simdjson-cmake) -endif() - -if (USE_FASTOPS) - add_subdirectory (fastops-cmake) -endif() - -if (USE_AMQPCPP OR USE_CASSANDRA) - add_subdirectory (libuv-cmake) -endif() -if (USE_AMQPCPP) - add_subdirectory (amqpcpp-cmake) -endif() -if (USE_CASSANDRA) - add_subdirectory (cassandra-cmake) -endif() - -# Should go before: -# - sentry-native -add_subdirectory (curl-cmake) - -if (USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY) - add_subdirectory(azure-cmake) -endif() - -if (USE_SENTRY) - add_subdirectory (sentry-native-cmake) -endif() - -add_subdirectory (fmtlib-cmake) - -if (USE_KRB5) - add_subdirectory (krb5-cmake) - if (USE_CYRUS_SASL) - add_subdirectory (cyrus-sasl-cmake) - endif() -endif() - -if (USE_INTERNAL_ROCKSDB_LIBRARY) - add_subdirectory(rocksdb-cmake) -endif() - -if (USE_LIBPQXX) - add_subdirectory (libpq-cmake) - add_subdirectory (libpqxx-cmake) -endif() - -if (USE_NURAFT) - add_subdirectory(nuraft-cmake) -endif() - -add_subdirectory(fast_float-cmake) - -if (USE_NLP) - add_subdirectory(libstemmer-c-cmake) - add_subdirectory(wordnet-blast-cmake) - add_subdirectory(lemmagen-c-cmake) - add_subdirectory(nlp-data-cmake) - add_subdirectory(cld2-cmake) -endif() - -if (USE_BZIP2) - add_subdirectory(bzip2-cmake) -endif() - -if (USE_SQLITE) - add_subdirectory(sqlite-cmake) -endif() - -if (USE_S2_GEOMETRY) - add_subdirectory(s2geometry-cmake) -endif() - -if (USE_HIVE) - add_subdirectory (hive-metastore-cmake) -endif() +add_contrib (sqlite-cmake sqlite-amalgamation) +add_contrib (s2geometry-cmake s2geometry) # Put all targets defined here and in subdirectories under "contrib/" folders in GUI-based IDEs. 
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear diff --git a/contrib/NuRaft b/contrib/NuRaft index c2043aa250e..1707a7572aa 160000 --- a/contrib/NuRaft +++ b/contrib/NuRaft @@ -1 +1 @@ -Subproject commit c2043aa250e53ad5cf75e596e319d587af4dcb3c +Subproject commit 1707a7572aa66ec5d0a2dbe2bf5effa3352e6b2d diff --git a/contrib/abseil-cpp-cmake/CMakeLists.txt b/contrib/abseil-cpp-cmake/CMakeLists.txt index 65e4c24ff5a..4fb02327d17 100644 --- a/contrib/abseil-cpp-cmake/CMakeLists.txt +++ b/contrib/abseil-cpp-cmake/CMakeLists.txt @@ -6,15 +6,17 @@ set(BUILD_TESTING OFF) set(ABSL_PROPAGATE_CXX_STD ON) add_subdirectory("${ABSL_ROOT_DIR}" "${ClickHouse_BINARY_DIR}/contrib/abseil-cpp") -add_library(abseil_swiss_tables INTERFACE) +add_library(_abseil_swiss_tables INTERFACE) -target_link_libraries(abseil_swiss_tables INTERFACE +target_link_libraries(_abseil_swiss_tables INTERFACE absl::flat_hash_map absl::flat_hash_set ) get_target_property(FLAT_HASH_MAP_INCLUDE_DIR absl::flat_hash_map INTERFACE_INCLUDE_DIRECTORIES) -target_include_directories (abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_MAP_INCLUDE_DIR}) +target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_MAP_INCLUDE_DIR}) get_target_property(FLAT_HASH_SET_INCLUDE_DIR absl::flat_hash_set INTERFACE_INCLUDE_DIRECTORIES) -target_include_directories (abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_SET_INCLUDE_DIR}) +target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_SET_INCLUDE_DIR}) + +add_library(ch_contrib::abseil_swiss_tables ALIAS _abseil_swiss_tables) diff --git a/contrib/amqpcpp-cmake/CMakeLists.txt b/contrib/amqpcpp-cmake/CMakeLists.txt index faef7bd4a1c..974d097e06f 100644 --- a/contrib/amqpcpp-cmake/CMakeLists.txt +++ b/contrib/amqpcpp-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option(ENABLE_AMQPCPP "Enable AMQP-CPP" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_AMQPCPP) + message(STATUS "Not using AMQP-CPP") + return() +endif() + set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP") set (SRCS @@ -23,9 +30,9 @@ set (SRCS "${LIBRARY_DIR}/src/watchable.cpp" ) -add_library(amqp-cpp ${SRCS}) +add_library(_amqp-cpp ${SRCS}) -target_compile_options (amqp-cpp +target_compile_options (_amqp-cpp PRIVATE -Wno-old-style-cast -Wno-inconsistent-missing-destructor-override @@ -40,5 +47,6 @@ target_compile_options (amqp-cpp -w ) -target_include_directories (amqp-cpp SYSTEM PUBLIC "${LIBRARY_DIR}/include") -target_link_libraries(amqp-cpp PUBLIC ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}) +target_include_directories (_amqp-cpp SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include" "${LIBRARY_DIR}") +target_link_libraries (_amqp-cpp PUBLIC OpenSSL::Crypto OpenSSL::SSL ch_contrib::uv) +add_library (ch_contrib::amqp_cpp ALIAS _amqp-cpp) diff --git a/contrib/arrow-cmake/CMakeLists.txt b/contrib/arrow-cmake/CMakeLists.txt index a28a83a87c5..54bfead6da7 100644 --- a/contrib/arrow-cmake/CMakeLists.txt +++ b/contrib/arrow-cmake/CMakeLists.txt @@ -1,3 +1,40 @@ +# We dont use arrow's cmakefiles because they uses too many depends and download some libs in compile time +# But you can update auto-generated parquet files manually: +# cd {BUILD_DIR}/contrib/arrow/cpp/src/parquet && mkdir -p build && cd build +# cmake .. 
-DARROW_COMPUTE=ON -DARROW_PARQUET=ON -DARROW_SIMD_LEVEL=NONE -DARROW_VERBOSE_THIRDPARTY_BUILD=ON +# -DARROW_BUILD_SHARED=1 -DARROW_BUILD_UTILITIES=OFF -DARROW_BUILD_INTEGRATION=OFF +# -DBoost_FOUND=1 -DARROW_TEST_LINKAGE="shared" +# make -j8 +# copy {BUILD_DIR}/contrib/arrow/cpp/src/parquet/*.cpp,*.h -> {BUILD_DIR}/contrib/arrow-cmake/cpp/src/parquet/ + +# Also useful parquet reader: +# cd {BUILD_DIR}/contrib/arrow/cpp && mkdir -p build && cd build +# cmake .. -DARROW_PARQUET=1 -DARROW_WITH_SNAPPY=1 -DPARQUET_BUILD_EXECUTABLES=1 +# make -j8 +# {BUILD_DIR}/contrib/arrow/cpp/build/release/parquet-reader some_file.parquet + +set (ENABLE_PARQUET_DEFAULT ${ENABLE_LIBRARIES}) +if (OS_FREEBSD) + set (ENABLE_PARQUET_DEFAULT OFF) +endif() +option (ENABLE_PARQUET "Enable parquet" ${ENABLE_PARQUET_DEFAULT}) + +if (NOT ENABLE_PARQUET) + message(STATUS "Building without Parquet support") + return() +endif() + +# Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory +if (OS_FREEBSD) + message (FATAL_ERROR "Using internal parquet library on FreeBSD is not supported") +endif() + +if(USE_STATIC_LIBRARIES) + set(FLATBUFFERS_LIBRARY flatbuffers) +else() + set(FLATBUFFERS_LIBRARY flatbuffers_shared) +endif() + set (CMAKE_CXX_STANDARD 17) set(ARROW_VERSION "6.0.1") @@ -27,11 +64,10 @@ set(ORC_SOURCE_SRC_DIR "${ORC_SOURCE_DIR}/src") set(ORC_BUILD_SRC_DIR "${CMAKE_CURRENT_BINARY_DIR}/../orc/c++/src") set(ORC_BUILD_INCLUDE_DIR "${CMAKE_CURRENT_BINARY_DIR}/../orc/c++/include") -set(GOOGLE_PROTOBUF_DIR "${Protobuf_INCLUDE_DIR}/") set(ORC_ADDITION_SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}) set(ARROW_SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src") -set(PROTOBUF_EXECUTABLE ${Protobuf_PROTOC_EXECUTABLE}) +set(PROTOBUF_EXECUTABLE $) set(PROTO_DIR "${ORC_SOURCE_DIR}/../proto") @@ -48,7 +84,7 @@ set(FLATBUFFERS_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/flatbuffers") set(FLATBUFFERS_INCLUDE_DIR "${FLATBUFFERS_SRC_DIR}/include") # set flatbuffers CMake options -if (MAKE_STATIC_LIBRARIES) +if (USE_STATIC_LIBRARIES) set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library") set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library") else () @@ -303,30 +339,14 @@ set(ARROW_SRCS ${ORC_SRCS} ) -if (SNAPPY_INCLUDE_DIR AND SNAPPY_LIBRARY) - set(ARROW_WITH_SNAPPY 1) -endif () - -if (ZLIB_INCLUDE_DIR AND ZLIB_LIBRARIES) - set(ARROW_WITH_ZLIB 1) -endif () - -if (ZSTD_INCLUDE_DIR AND ZSTD_LIBRARY) - set(ARROW_WITH_ZSTD 1) -endif () - add_definitions(-DARROW_WITH_LZ4) SET(ARROW_SRCS "${LIBRARY_DIR}/util/compression_lz4.cc" ${ARROW_SRCS}) -if (ARROW_WITH_SNAPPY) - add_definitions(-DARROW_WITH_SNAPPY) - SET(ARROW_SRCS "${LIBRARY_DIR}/util/compression_snappy.cc" ${ARROW_SRCS}) -endif () +add_definitions(-DARROW_WITH_SNAPPY) +SET(ARROW_SRCS "${LIBRARY_DIR}/util/compression_snappy.cc" ${ARROW_SRCS}) -if (ARROW_WITH_ZLIB) - add_definitions(-DARROW_WITH_ZLIB) - SET(ARROW_SRCS "${LIBRARY_DIR}/util/compression_zlib.cc" ${ARROW_SRCS}) -endif () +add_definitions(-DARROW_WITH_ZLIB) +SET(ARROW_SRCS "${LIBRARY_DIR}/util/compression_zlib.cc" ${ARROW_SRCS}) if (ARROW_WITH_ZSTD) add_definitions(-DARROW_WITH_ZSTD) @@ -334,41 +354,33 @@ if (ARROW_WITH_ZSTD) endif () -add_library(${ARROW_LIBRARY} ${ARROW_SRCS}) +add_library(_arrow ${ARROW_SRCS}) # Arrow dependencies -add_dependencies(${ARROW_LIBRARY} ${FLATBUFFERS_LIBRARY}) +add_dependencies(_arrow ${FLATBUFFERS_LIBRARY}) -target_link_libraries(${ARROW_LIBRARY} 
PRIVATE ${FLATBUFFERS_LIBRARY} boost::filesystem) +target_link_libraries(_arrow PRIVATE ${FLATBUFFERS_LIBRARY} boost::filesystem) -if (USE_INTERNAL_PROTOBUF_LIBRARY) - add_dependencies(${ARROW_LIBRARY} protoc) -endif () +add_dependencies(_arrow protoc) -target_include_directories(${ARROW_LIBRARY} SYSTEM PUBLIC ${ARROW_SRC_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/cpp/src") -target_link_libraries(${ARROW_LIBRARY} PRIVATE ${DOUBLE_CONVERSION_LIBRARIES} ${Protobuf_LIBRARY}) -target_link_libraries(${ARROW_LIBRARY} PRIVATE lz4) -if (ARROW_WITH_SNAPPY) - target_link_libraries(${ARROW_LIBRARY} PRIVATE ${SNAPPY_LIBRARY}) -endif () -if (ARROW_WITH_ZLIB) - target_link_libraries(${ARROW_LIBRARY} PRIVATE ${ZLIB_LIBRARIES}) -endif () -if (ARROW_WITH_ZSTD) - target_link_libraries(${ARROW_LIBRARY} PRIVATE ${ZSTD_LIBRARY}) - target_include_directories(${ARROW_LIBRARY} SYSTEM BEFORE PRIVATE ${ZLIB_INCLUDE_DIR}) -endif () +target_include_directories(_arrow SYSTEM BEFORE PUBLIC ${ARROW_SRC_DIR}) +target_include_directories(_arrow SYSTEM BEFORE PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/cpp/src") +target_link_libraries(_arrow PRIVATE ch_contrib::double_conversion) +target_link_libraries(_arrow PRIVATE ch_contrib::protobuf) +target_link_libraries(_arrow PRIVATE ch_contrib::lz4) +target_link_libraries(_arrow PRIVATE ch_contrib::snappy) +target_link_libraries(_arrow PRIVATE ch_contrib::zlib) +target_link_libraries(_arrow PRIVATE ch_contrib::zstd) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${ORC_INCLUDE_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${ORC_SOURCE_SRC_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${ORC_SOURCE_WRAP_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${GOOGLE_PROTOBUF_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${ORC_BUILD_SRC_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${ORC_BUILD_INCLUDE_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${ORC_ADDITION_SOURCE_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${FLATBUFFERS_INCLUDE_DIR}) -target_include_directories(${ARROW_LIBRARY} SYSTEM PRIVATE ${HDFS_INCLUDE_DIR}) +target_include_directories(_arrow SYSTEM BEFORE PUBLIC ${ORC_INCLUDE_DIR}) +target_include_directories(_arrow SYSTEM BEFORE PUBLIC ${ORC_BUILD_INCLUDE_DIR}) +target_include_directories(_arrow SYSTEM PRIVATE ${ORC_SOURCE_SRC_DIR}) +target_include_directories(_arrow SYSTEM PRIVATE ${ORC_SOURCE_WRAP_DIR}) +target_include_directories(_arrow SYSTEM PRIVATE ${ORC_BUILD_SRC_DIR}) +target_include_directories(_arrow SYSTEM PRIVATE ${ORC_ADDITION_SOURCE_DIR}) +target_include_directories(_arrow SYSTEM PRIVATE ${ARROW_SRC_DIR}) +target_include_directories(_arrow SYSTEM PRIVATE ${FLATBUFFERS_INCLUDE_DIR}) +target_include_directories(_arrow SYSTEM PRIVATE ${HDFS_INCLUDE_DIR}) # === parquet @@ -411,13 +423,23 @@ set(PARQUET_SRCS "${GEN_LIBRARY_DIR}/parquet_types.cpp" ) #list(TRANSFORM PARQUET_SRCS PREPEND "${LIBRARY_DIR}/") # cmake 3.12 -add_library(${PARQUET_LIBRARY} ${PARQUET_SRCS}) -target_include_directories(${PARQUET_LIBRARY} SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" "${CMAKE_CURRENT_SOURCE_DIR}/cpp/src" PRIVATE ${OPENSSL_INCLUDE_DIR}) -target_link_libraries(${PARQUET_LIBRARY} PUBLIC ${ARROW_LIBRARY} PRIVATE ${THRIFT_LIBRARY} boost::headers_only boost::regex ${OPENSSL_LIBRARIES}) +add_library(_parquet ${PARQUET_SRCS}) +add_library(ch_contrib::parquet ALIAS _parquet) 
+target_include_directories(_parquet SYSTEM BEFORE + PUBLIC + "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" + "${CMAKE_CURRENT_SOURCE_DIR}/cpp/src") +target_link_libraries(_parquet + PUBLIC _arrow + PRIVATE + ch_contrib::thrift + boost::headers_only + boost::regex + OpenSSL::Crypto OpenSSL::SSL) if (SANITIZE STREQUAL "undefined") - target_compile_options(${PARQUET_LIBRARY} PRIVATE -fno-sanitize=undefined) - target_compile_options(${ARROW_LIBRARY} PRIVATE -fno-sanitize=undefined) + target_compile_options(_parquet PRIVATE -fno-sanitize=undefined) + target_compile_options(_arrow PRIVATE -fno-sanitize=undefined) endif () # === tools @@ -426,5 +448,9 @@ set(TOOLS_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/tools/parquet") set(PARQUET_TOOLS parquet_dump_schema parquet_reader parquet_scan) foreach (TOOL ${PARQUET_TOOLS}) add_executable(${TOOL} "${TOOLS_DIR}/${TOOL}.cc") - target_link_libraries(${TOOL} PRIVATE ${PARQUET_LIBRARY}) + target_link_libraries(${TOOL} PRIVATE _parquet) endforeach () + +# The library is large - avoid bloat. +target_compile_options (_arrow PRIVATE -g0) +target_compile_options (_parquet PRIVATE -g0) diff --git a/contrib/avro-cmake/CMakeLists.txt b/contrib/avro-cmake/CMakeLists.txt index b56afd1598c..d91ce40dd54 100644 --- a/contrib/avro-cmake/CMakeLists.txt +++ b/contrib/avro-cmake/CMakeLists.txt @@ -1,3 +1,11 @@ +# Needed when using Apache Avro serialization format +option (ENABLE_AVRO "Enable Avro" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_AVRO) + message(STATUS "Not using avro") + return() +endif() + set(AVROCPP_ROOT_DIR "${CMAKE_SOURCE_DIR}/contrib/avro/lang/c++") set(AVROCPP_INCLUDE_DIR "${AVROCPP_ROOT_DIR}/api") set(AVROCPP_SOURCE_DIR "${AVROCPP_ROOT_DIR}/impl") @@ -40,18 +48,17 @@ set (AVROCPP_SOURCE_FILES "${AVROCPP_SOURCE_DIR}/Validator.cc" ) -add_library (avrocpp ${AVROCPP_SOURCE_FILES}) -set_target_properties (avrocpp PROPERTIES VERSION ${AVRO_VERSION_MAJOR}.${AVRO_VERSION_MINOR}) +add_library (_avrocpp ${AVROCPP_SOURCE_FILES}) +add_library (ch_contrib::avrocpp ALIAS _avrocpp) +set_target_properties (_avrocpp PROPERTIES VERSION ${AVRO_VERSION_MAJOR}.${AVRO_VERSION_MINOR}) -target_include_directories(avrocpp SYSTEM PUBLIC ${AVROCPP_INCLUDE_DIR}) +target_include_directories(_avrocpp SYSTEM PUBLIC ${AVROCPP_INCLUDE_DIR}) -target_link_libraries (avrocpp PRIVATE boost::headers_only boost::iostreams) +target_link_libraries (_avrocpp PRIVATE boost::headers_only boost::iostreams) -if (SNAPPY_INCLUDE_DIR AND SNAPPY_LIBRARY) - target_compile_definitions (avrocpp PUBLIC SNAPPY_CODEC_AVAILABLE) - target_include_directories (avrocpp PRIVATE ${SNAPPY_INCLUDE_DIR}) - target_link_libraries (avrocpp PRIVATE ${SNAPPY_LIBRARY}) -endif () +target_compile_definitions (_avrocpp PUBLIC SNAPPY_CODEC_AVAILABLE) +target_include_directories (_avrocpp PRIVATE ${SNAPPY_INCLUDE_DIR}) +target_link_libraries (_avrocpp PRIVATE ch_contrib::snappy) if (COMPILER_GCC) set (SUPPRESS_WARNINGS -Wno-non-virtual-dtor) @@ -59,11 +66,12 @@ elseif (COMPILER_CLANG) set (SUPPRESS_WARNINGS -Wno-non-virtual-dtor) endif () -target_compile_options(avrocpp PRIVATE ${SUPPRESS_WARNINGS}) +target_compile_options(_avrocpp PRIVATE ${SUPPRESS_WARNINGS}) # create a symlink to include headers with ADD_CUSTOM_TARGET(avro_symlink_headers ALL COMMAND ${CMAKE_COMMAND} -E make_directory "${AVROCPP_ROOT_DIR}/include" COMMAND ${CMAKE_COMMAND} -E create_symlink "${AVROCPP_ROOT_DIR}/api" "${AVROCPP_ROOT_DIR}/include/avro" ) -add_dependencies(avrocpp avro_symlink_headers) +add_dependencies(_avrocpp avro_symlink_headers) 
+target_include_directories(_avrocpp SYSTEM BEFORE PUBLIC "${AVROCPP_ROOT_DIR}/include") diff --git a/contrib/aws-s3-cmake/CMakeLists.txt b/contrib/aws-s3-cmake/CMakeLists.txt index 50f9482ef54..de6486e58fd 100644 --- a/contrib/aws-s3-cmake/CMakeLists.txt +++ b/contrib/aws-s3-cmake/CMakeLists.txt @@ -1,3 +1,14 @@ +if(NOT OS_FREEBSD) + option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES}) +elseif(ENABLE_S3) + message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use S3 on FreeBSD") +endif() + +if(NOT ENABLE_S3) + message(STATUS "Not using S3") + return() +endif() + SET(AWS_S3_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3") SET(AWS_CORE_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core") SET(AWS_CHECKSUMS_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-checksums") @@ -80,24 +91,30 @@ set(S3_INCLUDES "${CMAKE_CURRENT_BINARY_DIR}/include/" ) -add_library(aws_s3_checksums ${AWS_CHECKSUMS_SOURCES}) -target_include_directories(aws_s3_checksums SYSTEM PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/") +add_library(_aws_s3_checksums ${AWS_CHECKSUMS_SOURCES}) +target_include_directories(_aws_s3_checksums SYSTEM PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/") if(CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG") - target_compile_definitions(aws_s3_checksums PRIVATE "-DDEBUG_BUILD") + target_compile_definitions(_aws_s3_checksums PRIVATE "-DDEBUG_BUILD") endif() -set_target_properties(aws_s3_checksums PROPERTIES LINKER_LANGUAGE C) -set_property(TARGET aws_s3_checksums PROPERTY C_STANDARD 99) +set_target_properties(_aws_s3_checksums PROPERTIES LINKER_LANGUAGE C) +set_property(TARGET _aws_s3_checksums PROPERTY C_STANDARD 99) -add_library(aws_s3 ${S3_UNIFIED_SRC}) +add_library(_aws_s3 ${S3_UNIFIED_SRC}) -target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1") -target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7") -target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231") -target_include_directories(aws_s3 SYSTEM PUBLIC ${S3_INCLUDES}) +target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1") +target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7") +target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231") +target_include_directories(_aws_s3 SYSTEM BEFORE PUBLIC ${S3_INCLUDES}) -if (OPENSSL_FOUND) - target_compile_definitions(aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION) - target_link_libraries(aws_s3 PRIVATE ${OPENSSL_LIBRARIES}) +if (TARGET OpenSSL::SSL) + target_compile_definitions(_aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION) + target_link_libraries(_aws_s3 PRIVATE OpenSSL::Crypto OpenSSL::SSL) endif() -target_link_libraries(aws_s3 PRIVATE aws_s3_checksums) +target_link_libraries(_aws_s3 PRIVATE _aws_s3_checksums) + +# The library is large - avoid bloat. 
+target_compile_options (_aws_s3 PRIVATE -g0) +target_compile_options (_aws_s3_checksums PRIVATE -g0) + +add_library(ch_contrib::aws_s3 ALIAS _aws_s3) diff --git a/contrib/azure-cmake/CMakeLists.txt b/contrib/azure-cmake/CMakeLists.txt index 527503b85a2..031d8dc9a0b 100644 --- a/contrib/azure-cmake/CMakeLists.txt +++ b/contrib/azure-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option (ENABLE_AZURE_BLOB_STORAGE "Enable Azure blob storage" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_AZURE_BLOB_STORAGE) + message(STATUS "Not using Azure blob storage") + return() +endif() + set(AZURE_DIR "${ClickHouse_SOURCE_DIR}/contrib/azure") set(AZURE_SDK_LIBRARY_DIR "${AZURE_DIR}/sdk") @@ -43,10 +50,10 @@ set(AZURE_SDK_INCLUDES include("${AZURE_DIR}/cmake-modules/AzureTransportAdapters.cmake") -add_library(azure_sdk ${AZURE_SDK_UNIFIED_SRC}) +add_library(_azure_sdk ${AZURE_SDK_UNIFIED_SRC}) if (COMPILER_CLANG) - target_compile_options(azure_sdk PRIVATE + target_compile_options(_azure_sdk PRIVATE -Wno-deprecated-copy-dtor -Wno-extra-semi -Wno-suggest-destructor-override @@ -55,20 +62,22 @@ if (COMPILER_CLANG) ) if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 13) - target_compile_options(azure_sdk PRIVATE -Wno-reserved-identifier) + target_compile_options(_azure_sdk PRIVATE -Wno-reserved-identifier) endif() endif() # Originally, on Windows azure-core is built with bcrypt and crypt32 by default -if (OPENSSL_FOUND) - target_link_libraries(azure_sdk PRIVATE ${OPENSSL_LIBRARIES}) +if (TARGET OpenSSL::SSL) + target_link_libraries(_azure_sdk PRIVATE OpenSSL::Crypto OpenSSL::SSL) endif() # Originally, on Windows azure-core is built with winhttp by default -if (CURL_FOUND) - target_link_libraries(azure_sdk PRIVATE ${CURL_LIBRARY}) +if (TARGET ch_contrib::curl) + target_link_libraries(_azure_sdk PRIVATE ch_contrib::curl) endif() -target_link_libraries(azure_sdk PRIVATE ${LIBXML2_LIBRARIES}) +target_link_libraries(_azure_sdk PRIVATE ch_contrib::libxml2) -target_include_directories(azure_sdk SYSTEM PUBLIC ${AZURE_SDK_INCLUDES}) +target_include_directories(_azure_sdk SYSTEM BEFORE PUBLIC ${AZURE_SDK_INCLUDES}) + +add_library(ch_contrib::azure_sdk ALIAS _azure_sdk) diff --git a/contrib/base64-cmake/CMakeLists.txt b/contrib/base64-cmake/CMakeLists.txt index 4ebb4e68728..69040a9bedc 100644 --- a/contrib/base64-cmake/CMakeLists.txt +++ b/contrib/base64-cmake/CMakeLists.txt @@ -1,36 +1,47 @@ +if(ARCH_AMD64 OR ARCH_ARM) + option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES}) +elseif(ENABLE_BASE64) + message (${RECONFIGURE_MESSAGE_LEVEL} "base64 library is only supported on x86_64 and aarch64") +endif() + +if (NOT ENABLE_BASE64) + message(STATUS "Not using base64") + return() +endif() + SET(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/base64") -add_library(base64_scalar OBJECT "${LIBRARY_DIR}/turbob64c.c" "${LIBRARY_DIR}/turbob64d.c") -add_library(base64_ssse3 OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This file also contains code for ARM NEON +add_library(_base64_scalar OBJECT "${LIBRARY_DIR}/turbob64c.c" "${LIBRARY_DIR}/turbob64d.c") +add_library(_base64_ssse3 OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This file also contains code for ARM NEON if (ARCH_AMD64) - add_library(base64_avx OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This is not a mistake. One file is compiled twice. - add_library(base64_avx2 OBJECT "${LIBRARY_DIR}/turbob64avx2.c") + add_library(_base64_avx OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This is not a mistake. One file is compiled twice. 
+ add_library(_base64_avx2 OBJECT "${LIBRARY_DIR}/turbob64avx2.c") endif () -target_compile_options(base64_scalar PRIVATE -falign-loops) +target_compile_options(_base64_scalar PRIVATE -falign-loops) if (ARCH_AMD64) - target_compile_options(base64_ssse3 PRIVATE -mno-avx -mno-avx2 -mssse3 -falign-loops) - target_compile_options(base64_avx PRIVATE -falign-loops -mavx) - target_compile_options(base64_avx2 PRIVATE -falign-loops -mavx2) + target_compile_options(_base64_ssse3 PRIVATE -mno-avx -mno-avx2 -mssse3 -falign-loops) + target_compile_options(_base64_avx PRIVATE -falign-loops -mavx) + target_compile_options(_base64_avx2 PRIVATE -falign-loops -mavx2) else () - target_compile_options(base64_ssse3 PRIVATE -falign-loops) + target_compile_options(_base64_ssse3 PRIVATE -falign-loops) endif () if (ARCH_AMD64) - add_library(base64 - $ - $ - $ - $) + add_library(_base64 + $ + $ + $ + $) else () - add_library(base64 - $ - $) + add_library(_base64 + $ + $) endif () -target_include_directories(base64 SYSTEM PUBLIC ${LIBRARY_DIR}) +target_include_directories(_base64 SYSTEM PUBLIC ${LIBRARY_DIR}) if (XCODE OR XCODE_VERSION) # https://gitlab.kitware.com/cmake/cmake/issues/17457 @@ -39,5 +50,7 @@ if (XCODE OR XCODE_VERSION) if (NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}/dummy.c") file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/dummy.c" "") endif () - target_sources(base64 PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/dummy.c") + target_sources(_base64 PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/dummy.c") endif () + +add_library(ch_contrib::base64 ALIAS _base64) diff --git a/contrib/boost-cmake/CMakeLists.txt b/contrib/boost-cmake/CMakeLists.txt index 4a21b8a0e2d..0215c68e683 100644 --- a/contrib/boost-cmake/CMakeLists.txt +++ b/contrib/boost-cmake/CMakeLists.txt @@ -1,243 +1,181 @@ -option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ON) +set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/boost") -if (NOT USE_INTERNAL_BOOST_LIBRARY) - set(BOOST_VERSION 1.78) +# filesystem - find_package(Boost ${BOOST_VERSION} COMPONENTS - system - filesystem - iostreams - program_options - regex - context - coroutine - graph - ) +set (SRCS_FILESYSTEM + "${LIBRARY_DIR}/libs/filesystem/src/codecvt_error_category.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/directory.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/exception.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/operations.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/path.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/path_traits.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/portability.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/unique_path.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/utf8_codecvt_facet.cpp" + "${LIBRARY_DIR}/libs/filesystem/src/windows_file_codecvt.cpp" +) - if(Boost_INCLUDE_DIR AND Boost_FILESYSTEM_LIBRARY AND - Boost_PROGRAM_OPTIONS_LIBRARY AND Boost_REGEX_LIBRARY AND Boost_SYSTEM_LIBRARY AND Boost_CONTEXT_LIBRARY AND - Boost_COROUTINE_LIBRARY AND Boost_GRAPH_LIBRARY) +add_library (_boost_filesystem ${SRCS_FILESYSTEM}) +add_library (boost::filesystem ALIAS _boost_filesystem) +target_include_directories (_boost_filesystem SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}) - set(EXTERNAL_BOOST_FOUND 1) +# headers-only - add_library (_boost_headers_only INTERFACE) - add_library (boost::headers_only ALIAS _boost_headers_only) - target_include_directories (_boost_headers_only SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIR}) +add_library (_boost_headers_only INTERFACE) +add_library (boost::headers_only ALIAS _boost_headers_only) +target_include_directories (_boost_headers_only SYSTEM BEFORE INTERFACE 
${LIBRARY_DIR}) - add_library (_boost_filesystem INTERFACE) - add_library (_boost_iostreams INTERFACE) - add_library (_boost_program_options INTERFACE) - add_library (_boost_regex INTERFACE) - add_library (_boost_system INTERFACE) - add_library (_boost_context INTERFACE) - add_library (_boost_coroutine INTERFACE) - add_library (_boost_graph INTERFACE) +# asio - target_link_libraries (_boost_filesystem INTERFACE ${Boost_FILESYSTEM_LIBRARY}) - target_link_libraries (_boost_iostreams INTERFACE ${Boost_IOSTREAMS_LIBRARY}) - target_link_libraries (_boost_program_options INTERFACE ${Boost_PROGRAM_OPTIONS_LIBRARY}) - target_link_libraries (_boost_regex INTERFACE ${Boost_REGEX_LIBRARY}) - target_link_libraries (_boost_system INTERFACE ${Boost_SYSTEM_LIBRARY}) - target_link_libraries (_boost_context INTERFACE ${Boost_CONTEXT_LIBRARY}) - target_link_libraries (_boost_coroutine INTERFACE ${Boost_COROUTINE_LIBRARY}) - target_link_libraries (_boost_graph INTERFACE ${Boost_GRAPH_LIBRARY}) +target_compile_definitions (_boost_headers_only INTERFACE BOOST_ASIO_STANDALONE=1) - add_library (boost::filesystem ALIAS _boost_filesystem) - add_library (boost::iostreams ALIAS _boost_iostreams) - add_library (boost::program_options ALIAS _boost_program_options) - add_library (boost::regex ALIAS _boost_regex) - add_library (boost::system ALIAS _boost_system) - add_library (boost::context ALIAS _boost_context) - add_library (boost::coroutine ALIAS _boost_coroutine) - add_library (boost::graph ALIAS _boost_graph) - else() - set(EXTERNAL_BOOST_FOUND 0) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system boost") +# iostreams + +set (SRCS_IOSTREAMS + "${LIBRARY_DIR}/libs/iostreams/src/file_descriptor.cpp" + "${LIBRARY_DIR}/libs/iostreams/src/gzip.cpp" + "${LIBRARY_DIR}/libs/iostreams/src/mapped_file.cpp" + "${LIBRARY_DIR}/libs/iostreams/src/zlib.cpp" +) + +add_library (_boost_iostreams ${SRCS_IOSTREAMS}) +add_library (boost::iostreams ALIAS _boost_iostreams) +target_include_directories (_boost_iostreams PRIVATE ${LIBRARY_DIR}) +target_link_libraries (_boost_iostreams PRIVATE ch_contrib::zlib) + +# program_options + +set (SRCS_PROGRAM_OPTIONS + "${LIBRARY_DIR}/libs/program_options/src/cmdline.cpp" + "${LIBRARY_DIR}/libs/program_options/src/config_file.cpp" + "${LIBRARY_DIR}/libs/program_options/src/convert.cpp" + "${LIBRARY_DIR}/libs/program_options/src/options_description.cpp" + "${LIBRARY_DIR}/libs/program_options/src/parsers.cpp" + "${LIBRARY_DIR}/libs/program_options/src/positional_options.cpp" + "${LIBRARY_DIR}/libs/program_options/src/split.cpp" + "${LIBRARY_DIR}/libs/program_options/src/utf8_codecvt_facet.cpp" + "${LIBRARY_DIR}/libs/program_options/src/value_semantic.cpp" + "${LIBRARY_DIR}/libs/program_options/src/variables_map.cpp" + "${LIBRARY_DIR}/libs/program_options/src/winmain.cpp" +) + +add_library (_boost_program_options ${SRCS_PROGRAM_OPTIONS}) +add_library (boost::program_options ALIAS _boost_program_options) +target_include_directories (_boost_program_options SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}) + +# regex + +set (SRCS_REGEX + "${LIBRARY_DIR}/libs/regex/src/posix_api.cpp" + "${LIBRARY_DIR}/libs/regex/src/regex_debug.cpp" + "${LIBRARY_DIR}/libs/regex/src/regex.cpp" + "${LIBRARY_DIR}/libs/regex/src/static_mutex.cpp" + "${LIBRARY_DIR}/libs/regex/src/wide_posix_api.cpp" +) + +add_library (_boost_regex ${SRCS_REGEX}) +add_library (boost::regex ALIAS _boost_regex) +target_include_directories (_boost_regex PRIVATE ${LIBRARY_DIR}) + +# system + +set (SRCS_SYSTEM + 
"${LIBRARY_DIR}/libs/system/src/error_code.cpp" +) + +add_library (_boost_system ${SRCS_SYSTEM}) +add_library (boost::system ALIAS _boost_system) +target_include_directories (_boost_system PRIVATE ${LIBRARY_DIR}) + +# context +enable_language(ASM) +SET(ASM_OPTIONS "-x assembler-with-cpp") + +set (SRCS_CONTEXT + "${LIBRARY_DIR}/libs/context/src/dummy.cpp" + "${LIBRARY_DIR}/libs/context/src/posix/stack_traits.cpp" +) + +if (SANITIZE AND (SANITIZE STREQUAL "address" OR SANITIZE STREQUAL "thread")) + add_compile_definitions(BOOST_USE_UCONTEXT) + + if (SANITIZE STREQUAL "address") + add_compile_definitions(BOOST_USE_ASAN) + elseif (SANITIZE STREQUAL "thread") + add_compile_definitions(BOOST_USE_TSAN) endif() + + set (SRCS_CONTEXT ${SRCS_CONTEXT} + "${LIBRARY_DIR}/libs/context/src/fiber.cpp" + "${LIBRARY_DIR}/libs/context/src/continuation.cpp" + ) +endif() +if (ARCH_ARM) + set (SRCS_CONTEXT ${SRCS_CONTEXT} + "${LIBRARY_DIR}/libs/context/src/asm/jump_arm64_aapcs_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/make_arm64_aapcs_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/ontop_arm64_aapcs_elf_gas.S" + ) +elseif (ARCH_PPC64LE) + set (SRCS_CONTEXT ${SRCS_CONTEXT} + "${LIBRARY_DIR}/libs/context/src/asm/jump_ppc64_sysv_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/make_ppc64_sysv_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/ontop_ppc64_sysv_elf_gas.S" + ) +elseif (ARCH_RISCV64) + set (SRCS_CONTEXT ${SRCS_CONTEXT} + "${LIBRARY_DIR}/libs/context/src/asm/jump_riscv64_sysv_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/make_riscv64_sysv_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/ontop_riscv64_sysv_elf_gas.S" + ) +elseif(OS_DARWIN) + set (SRCS_CONTEXT ${SRCS_CONTEXT} + "${LIBRARY_DIR}/libs/context/src/asm/jump_x86_64_sysv_macho_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/make_x86_64_sysv_macho_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/ontop_x86_64_sysv_macho_gas.S" + ) +else() + set (SRCS_CONTEXT ${SRCS_CONTEXT} + "${LIBRARY_DIR}/libs/context/src/asm/jump_x86_64_sysv_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/make_x86_64_sysv_elf_gas.S" + "${LIBRARY_DIR}/libs/context/src/asm/ontop_x86_64_sysv_elf_gas.S" + ) endif() -if (NOT EXTERNAL_BOOST_FOUND) - set (USE_INTERNAL_BOOST_LIBRARY 1) - set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/boost") +add_library (_boost_context ${SRCS_CONTEXT}) +add_library (boost::context ALIAS _boost_context) +target_include_directories (_boost_context PRIVATE ${LIBRARY_DIR}) - # filesystem +# coroutine - set (SRCS_FILESYSTEM - "${LIBRARY_DIR}/libs/filesystem/src/codecvt_error_category.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/directory.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/exception.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/operations.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/path.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/path_traits.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/portability.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/unique_path.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/utf8_codecvt_facet.cpp" - "${LIBRARY_DIR}/libs/filesystem/src/windows_file_codecvt.cpp" - ) +set (SRCS_COROUTINE + "${LIBRARY_DIR}/libs/coroutine/detail/coroutine_context.cpp" + "${LIBRARY_DIR}/libs/coroutine/exceptions.cpp" + "${LIBRARY_DIR}/libs/coroutine/posix/stack_traits.cpp" +) +add_library (_boost_coroutine ${SRCS_COROUTINE}) +add_library (boost::coroutine ALIAS _boost_coroutine) +target_include_directories (_boost_coroutine PRIVATE ${LIBRARY_DIR}) +target_link_libraries(_boost_coroutine PRIVATE _boost_context) - add_library (_boost_filesystem 
${SRCS_FILESYSTEM}) - add_library (boost::filesystem ALIAS _boost_filesystem) - target_include_directories (_boost_filesystem SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}) +# graph - # headers-only +set (SRCS_GRAPH + "${LIBRARY_DIR}/libs/graph/src/graphml.cpp" + "${LIBRARY_DIR}/libs/graph/src/read_graphviz_new.cpp" +) - add_library (_boost_headers_only INTERFACE) - add_library (boost::headers_only ALIAS _boost_headers_only) - target_include_directories (_boost_headers_only SYSTEM BEFORE INTERFACE ${LIBRARY_DIR}) +add_library (_boost_graph ${SRCS_GRAPH}) +add_library (boost::graph ALIAS _boost_graph) +target_include_directories (_boost_graph PRIVATE ${LIBRARY_DIR}) +target_link_libraries(_boost_graph PRIVATE _boost_regex) - # asio +# circular buffer +add_library(_boost_circular_buffer INTERFACE) +add_library(boost::circular_buffer ALIAS _boost_circular_buffer) +target_include_directories(_boost_circular_buffer SYSTEM BEFORE INTERFACE ${LIBRARY_DIR}) - target_compile_definitions (_boost_headers_only INTERFACE BOOST_ASIO_STANDALONE=1) - - # iostreams - - set (SRCS_IOSTREAMS - "${LIBRARY_DIR}/libs/iostreams/src/file_descriptor.cpp" - "${LIBRARY_DIR}/libs/iostreams/src/gzip.cpp" - "${LIBRARY_DIR}/libs/iostreams/src/mapped_file.cpp" - "${LIBRARY_DIR}/libs/iostreams/src/zlib.cpp" - ) - - add_library (_boost_iostreams ${SRCS_IOSTREAMS}) - add_library (boost::iostreams ALIAS _boost_iostreams) - target_include_directories (_boost_iostreams PRIVATE ${LIBRARY_DIR}) - target_link_libraries (_boost_iostreams PRIVATE ${ZLIB_LIBRARIES}) - - # program_options - - set (SRCS_PROGRAM_OPTIONS - "${LIBRARY_DIR}/libs/program_options/src/cmdline.cpp" - "${LIBRARY_DIR}/libs/program_options/src/config_file.cpp" - "${LIBRARY_DIR}/libs/program_options/src/convert.cpp" - "${LIBRARY_DIR}/libs/program_options/src/options_description.cpp" - "${LIBRARY_DIR}/libs/program_options/src/parsers.cpp" - "${LIBRARY_DIR}/libs/program_options/src/positional_options.cpp" - "${LIBRARY_DIR}/libs/program_options/src/split.cpp" - "${LIBRARY_DIR}/libs/program_options/src/utf8_codecvt_facet.cpp" - "${LIBRARY_DIR}/libs/program_options/src/value_semantic.cpp" - "${LIBRARY_DIR}/libs/program_options/src/variables_map.cpp" - "${LIBRARY_DIR}/libs/program_options/src/winmain.cpp" - ) - - add_library (_boost_program_options ${SRCS_PROGRAM_OPTIONS}) - add_library (boost::program_options ALIAS _boost_program_options) - target_include_directories (_boost_program_options SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}) - - # regex - - set (SRCS_REGEX - "${LIBRARY_DIR}/libs/regex/src/posix_api.cpp" - "${LIBRARY_DIR}/libs/regex/src/regex_debug.cpp" - "${LIBRARY_DIR}/libs/regex/src/regex.cpp" - "${LIBRARY_DIR}/libs/regex/src/static_mutex.cpp" - "${LIBRARY_DIR}/libs/regex/src/wide_posix_api.cpp" - ) - - add_library (_boost_regex ${SRCS_REGEX}) - add_library (boost::regex ALIAS _boost_regex) - target_include_directories (_boost_regex PRIVATE ${LIBRARY_DIR}) - - # system - - set (SRCS_SYSTEM - "${LIBRARY_DIR}/libs/system/src/error_code.cpp" - ) - - add_library (_boost_system ${SRCS_SYSTEM}) - add_library (boost::system ALIAS _boost_system) - target_include_directories (_boost_system PRIVATE ${LIBRARY_DIR}) - - # context - enable_language(ASM) - SET(ASM_OPTIONS "-x assembler-with-cpp") - - set (SRCS_CONTEXT - "${LIBRARY_DIR}/libs/context/src/dummy.cpp" - "${LIBRARY_DIR}/libs/context/src/posix/stack_traits.cpp" - ) - - if (SANITIZE AND (SANITIZE STREQUAL "address" OR SANITIZE STREQUAL "thread")) - add_compile_definitions(BOOST_USE_UCONTEXT) - - if (SANITIZE STREQUAL "address") - 
add_compile_definitions(BOOST_USE_ASAN) - elseif (SANITIZE STREQUAL "thread") - add_compile_definitions(BOOST_USE_TSAN) - endif() - - set (SRCS_CONTEXT ${SRCS_CONTEXT} - "${LIBRARY_DIR}/libs/context/src/fiber.cpp" - "${LIBRARY_DIR}/libs/context/src/continuation.cpp" - ) - endif() - if (ARCH_ARM) - set (SRCS_CONTEXT ${SRCS_CONTEXT} - "${LIBRARY_DIR}/libs/context/src/asm/jump_arm64_aapcs_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/make_arm64_aapcs_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/ontop_arm64_aapcs_elf_gas.S" - ) - elseif (ARCH_PPC64LE) - set (SRCS_CONTEXT ${SRCS_CONTEXT} - "${LIBRARY_DIR}/libs/context/src/asm/jump_ppc64_sysv_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/make_ppc64_sysv_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/ontop_ppc64_sysv_elf_gas.S" - ) - elseif (ARCH_RISCV64) - set (SRCS_CONTEXT ${SRCS_CONTEXT} - "${LIBRARY_DIR}/libs/context/src/asm/jump_riscv64_sysv_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/make_riscv64_sysv_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/ontop_riscv64_sysv_elf_gas.S" - ) - elseif(OS_DARWIN) - set (SRCS_CONTEXT ${SRCS_CONTEXT} - "${LIBRARY_DIR}/libs/context/src/asm/jump_x86_64_sysv_macho_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/make_x86_64_sysv_macho_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/ontop_x86_64_sysv_macho_gas.S" - ) - else() - set (SRCS_CONTEXT ${SRCS_CONTEXT} - "${LIBRARY_DIR}/libs/context/src/asm/jump_x86_64_sysv_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/make_x86_64_sysv_elf_gas.S" - "${LIBRARY_DIR}/libs/context/src/asm/ontop_x86_64_sysv_elf_gas.S" - ) - endif() - - add_library (_boost_context ${SRCS_CONTEXT}) - add_library (boost::context ALIAS _boost_context) - target_include_directories (_boost_context PRIVATE ${LIBRARY_DIR}) - - # coroutine - - set (SRCS_COROUTINE - "${LIBRARY_DIR}/libs/coroutine/detail/coroutine_context.cpp" - "${LIBRARY_DIR}/libs/coroutine/exceptions.cpp" - "${LIBRARY_DIR}/libs/coroutine/posix/stack_traits.cpp" - ) - add_library (_boost_coroutine ${SRCS_COROUTINE}) - add_library (boost::coroutine ALIAS _boost_coroutine) - target_include_directories (_boost_coroutine PRIVATE ${LIBRARY_DIR}) - target_link_libraries(_boost_coroutine PRIVATE _boost_context) - - # graph - - set (SRCS_GRAPH - "${LIBRARY_DIR}/libs/graph/src/graphml.cpp" - "${LIBRARY_DIR}/libs/graph/src/read_graphviz_new.cpp" - ) - - add_library (_boost_graph ${SRCS_GRAPH}) - add_library (boost::graph ALIAS _boost_graph) - target_include_directories (_boost_graph PRIVATE ${LIBRARY_DIR}) - target_link_libraries(_boost_graph PRIVATE _boost_regex) - - # circular buffer - add_library(_boost_circular_buffer INTERFACE) - add_library(boost::circular_buffer ALIAS _boost_circular_buffer) - target_include_directories(_boost_circular_buffer SYSTEM BEFORE INTERFACE ${LIBRARY_DIR}) - - # heap - add_library(_boost_heap INTERFACE) - add_library(boost::heap ALIAS _boost_heap) - target_include_directories(_boost_heap SYSTEM BEFORE INTERFACE ${LIBRARY_DIR}) - -endif () +# heap +add_library(_boost_heap INTERFACE) +add_library(boost::heap ALIAS _boost_heap) +target_include_directories(_boost_heap SYSTEM BEFORE INTERFACE ${LIBRARY_DIR}) diff --git a/contrib/boringssl-cmake/CMakeLists.txt b/contrib/boringssl-cmake/CMakeLists.txt index d599351fd5c..dd3332d70be 100644 --- a/contrib/boringssl-cmake/CMakeLists.txt +++ b/contrib/boringssl-cmake/CMakeLists.txt @@ -1,3 +1,13 @@ +# Needed for: +# - securely connecting to an external server, e.g. clickhouse-client --host ... 
--secure +# - lots of thirdparty libraries +option(ENABLE_SSL "Enable ssl" ${ENABLE_LIBRARIES}) + +if(NOT ENABLE_SSL) + message(STATUS "Not using openssl") + return() +endif() + # Copyright (c) 2019 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -358,7 +368,7 @@ elseif(WIN32) endif() add_library( - crypto + _crypto ${CRYPTO_ARCH_SOURCES} err_data.c @@ -595,7 +605,7 @@ add_library( ) add_library( - ssl + _ssl "${BORINGSSL_SOURCE_DIR}/ssl/bio_ssl.cc" "${BORINGSSL_SOURCE_DIR}/ssl/d1_both.cc" @@ -662,18 +672,22 @@ add_executable( "${BORINGSSL_SOURCE_DIR}/tool/transport_common.cc" ) -target_link_libraries(ssl crypto) -target_link_libraries(bssl ssl) +target_link_libraries(_ssl _crypto) +target_link_libraries(bssl _ssl) if(NOT WIN32 AND NOT ANDROID) - target_link_libraries(crypto pthread) + target_link_libraries(_crypto pthread) endif() +# NOTE: that ClickHouse does not support WIN32 anyway. if(WIN32) target_link_libraries(bssl ws2_32) endif() -target_include_directories(crypto SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include") -target_include_directories(ssl SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include") +target_include_directories(_crypto SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include") +target_include_directories(_ssl SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include") -target_compile_options(crypto PRIVATE -Wno-gnu-anonymous-struct) +target_compile_options(_crypto PRIVATE -Wno-gnu-anonymous-struct) + +add_library(OpenSSL::Crypto ALIAS _crypto) +add_library(OpenSSL::SSL ALIAS _ssl) diff --git a/contrib/brotli-cmake/CMakeLists.txt b/contrib/brotli-cmake/CMakeLists.txt index 7293cae0665..c81a6bf9076 100644 --- a/contrib/brotli-cmake/CMakeLists.txt +++ b/contrib/brotli-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option (ENABLE_BROTLI "Enable brotli" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_BROTLI) + message(STATUS "Not using brotli") + return() +endif() + set(BROTLI_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/brotli/c") set(BROTLI_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/brotli/c") @@ -33,10 +40,12 @@ set(SRCS "${BROTLI_SOURCE_DIR}/common/constants.c" ) -add_library(brotli ${SRCS}) +add_library(_brotli ${SRCS}) +add_library(ch_contrib::brotli ALIAS _brotli) -target_include_directories(brotli PUBLIC "${BROTLI_SOURCE_DIR}/include") +target_include_directories(_brotli SYSTEM BEFORE PUBLIC "${BROTLI_SOURCE_DIR}/include") if(M_LIBRARY) - target_link_libraries(brotli PRIVATE ${M_LIBRARY}) + target_link_libraries(_brotli PRIVATE ${M_LIBRARY}) endif() +target_compile_definitions(_brotli PRIVATE BROTLI_BUILD_PORTABLE=1) diff --git a/contrib/bzip2-cmake/CMakeLists.txt b/contrib/bzip2-cmake/CMakeLists.txt index a9d2efa43c1..2e01a624000 100644 --- a/contrib/bzip2-cmake/CMakeLists.txt +++ b/contrib/bzip2-cmake/CMakeLists.txt @@ -1,3 +1,9 @@ +option(ENABLE_BZIP2 "Enable bzip2 compression support" ${ENABLE_LIBRARIES}) +if (NOT ENABLE_BZIP2) + message (STATUS "bzip2 compression disabled") + return() +endif() + set(BZIP2_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/bzip2") set(BZIP2_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/bzip2") @@ -18,6 +24,10 @@ configure_file ( "${BZIP2_BINARY_DIR}/bz_version.h" ) -add_library(bzip2 ${SRCS}) - -target_include_directories(bzip2 PUBLIC "${BZIP2_SOURCE_DIR}" "${BZIP2_BINARY_DIR}") +add_library(_bzip2 ${SRCS}) +add_library(ch_contrib::bzip2 ALIAS _bzip2) +# To avoid -Wreserved-id-macro we use SYSTEM: +# +# clickhouse/contrib/bzip2/bzlib.h:23:9: error: macro name is a reserved 
identifier [-Werror,-Wreserved-id-macro] +# #define _BZLIB_H +target_include_directories(_bzip2 SYSTEM BEFORE PUBLIC "${BZIP2_SOURCE_DIR}" "${BZIP2_BINARY_DIR}") diff --git a/contrib/capnproto-cmake/CMakeLists.txt b/contrib/capnproto-cmake/CMakeLists.txt index 05446355535..297b847cd58 100644 --- a/contrib/capnproto-cmake/CMakeLists.txt +++ b/contrib/capnproto-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option (ENABLE_CAPNP "Enable Cap'n Proto" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_CAPNP) + message(STATUS "Not using Cap'n Proto library") + return() +endif() + set (CAPNPROTO_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/capnproto/c++/src") set (CMAKE_CXX_STANDARD 17) @@ -29,8 +36,8 @@ set (KJ_SRCS "${CAPNPROTO_SOURCE_DIR}/kj/parse/char.c++" ) -add_library(kj ${KJ_SRCS}) -target_include_directories(kj SYSTEM PUBLIC ${CAPNPROTO_SOURCE_DIR}) +add_library(_kj ${KJ_SRCS}) +target_include_directories(_kj SYSTEM PUBLIC ${CAPNPROTO_SOURCE_DIR}) set (CAPNP_SRCS "${CAPNPROTO_SOURCE_DIR}/capnp/c++.capnp.c++" @@ -51,11 +58,11 @@ set (CAPNP_SRCS "${CAPNPROTO_SOURCE_DIR}/capnp/stringify.c++" ) -add_library(capnp ${CAPNP_SRCS}) -set_target_properties(capnp +add_library(_capnp ${CAPNP_SRCS}) +set_target_properties(_capnp PROPERTIES LINKER_LANGUAGE CXX ) -target_link_libraries(capnp PUBLIC kj) +target_link_libraries(_capnp PUBLIC _kj) set (CAPNPC_SRCS "${CAPNPROTO_SOURCE_DIR}/capnp/compiler/type-id.c++" @@ -71,8 +78,8 @@ set (CAPNPC_SRCS "${CAPNPROTO_SOURCE_DIR}/capnp/serialize-text.c++" ) -add_library(capnpc ${CAPNPC_SRCS}) -target_link_libraries(capnpc PUBLIC capnp) +add_library(_capnpc ${CAPNPC_SRCS}) +target_link_libraries(_capnpc PUBLIC _capnp) # The library has substandard code if (COMPILER_GCC) @@ -82,6 +89,8 @@ elseif (COMPILER_CLANG) set (CAPNP_PRIVATE_CXX_FLAGS -fno-char8_t) endif () -target_compile_options(kj PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS}) -target_compile_options(capnp PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS}) -target_compile_options(capnpc PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS}) +target_compile_options(_kj PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS}) +target_compile_options(_capnp PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS}) +target_compile_options(_capnpc PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS}) + +add_library(ch_contrib::capnp ALIAS _capnpc) diff --git a/contrib/cassandra-cmake/CMakeLists.txt b/contrib/cassandra-cmake/CMakeLists.txt index a8f2bec5e2b..416dca6f2bc 100644 --- a/contrib/cassandra-cmake/CMakeLists.txt +++ b/contrib/cassandra-cmake/CMakeLists.txt @@ -1,3 +1,14 @@ +option(ENABLE_CASSANDRA "Enable Cassandra" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_CASSANDRA) + message(STATUS "Not using cassandra") + return() +endif() + +if (APPLE) + set(CMAKE_MACOSX_RPATH ON) +endif() + # Need to use C++17 since the compilation is not possible with C++20 currently. 
set (CMAKE_CXX_STANDARD 17) @@ -42,16 +53,16 @@ endif() list(APPEND SOURCES ${CASS_SRC_DIR}/atomic/atomic_std.hpp) -add_library(curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp) -add_library(hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp) -add_library(http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c) -add_library(minizip OBJECT +add_library(_curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp) +add_library(_hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp) +add_library(_http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c) +add_library(_minizip OBJECT ${CASS_SRC_DIR}/third_party/minizip/ioapi.c ${CASS_SRC_DIR}/third_party/minizip/zip.c ${CASS_SRC_DIR}/third_party/minizip/unzip.c) -target_link_libraries(minizip zlib) -target_compile_definitions(minizip PRIVATE "-Dz_crc_t=unsigned long") +target_link_libraries(_minizip ch_contrib::zlib) +target_compile_definitions(_minizip PRIVATE "-Dz_crc_t=unsigned long") list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/third_party/curl @@ -108,20 +119,22 @@ configure_file( ${CMAKE_CURRENT_BINARY_DIR}/driver_config.hpp) -add_library(cassandra +add_library(_cassandra ${SOURCES} - $<TARGET_OBJECTS:curl_hostcheck> - $<TARGET_OBJECTS:hdr_histogram> - $<TARGET_OBJECTS:http-parser> - $<TARGET_OBJECTS:minizip>) + $<TARGET_OBJECTS:_curl_hostcheck> + $<TARGET_OBJECTS:_hdr_histogram> + $<TARGET_OBJECTS:_http-parser> + $<TARGET_OBJECTS:_minizip>) -target_link_libraries(cassandra zlib) -add_library(cassandra_static ALIAS cassandra) -target_include_directories(cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS}) -target_compile_definitions(cassandra PRIVATE CASS_BUILDING) +target_link_libraries(_cassandra ch_contrib::zlib) +target_include_directories(_cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS}) +target_include_directories(_cassandra SYSTEM BEFORE PUBLIC ${CASS_INCLUDE_DIR}) +target_compile_definitions(_cassandra PRIVATE CASS_BUILDING) -target_link_libraries(cassandra uv) +target_link_libraries(_cassandra ch_contrib::uv) if(CASS_USE_OPENSSL) - target_link_libraries(cassandra ssl) + target_link_libraries(_cassandra OpenSSL::SSL) endif() + +add_library(ch_contrib::cassandra ALIAS _cassandra) diff --git a/contrib/cctz-cmake/CMakeLists.txt b/contrib/cctz-cmake/CMakeLists.txt index 2248ba8b612..f1ef9b53f7d 100644 --- a/contrib/cctz-cmake/CMakeLists.txt +++ b/contrib/cctz-cmake/CMakeLists.txt @@ -1,106 +1,63 @@ -option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ON) +include(${ClickHouse_SOURCE_DIR}/cmake/embed_binary.cmake) +set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/cctz") -if (NOT USE_INTERNAL_CCTZ_LIBRARY) - find_library (LIBRARY_CCTZ cctz) - find_path (INCLUDE_CCTZ NAMES cctz/civil_time.h) +set (SRCS + "${LIBRARY_DIR}/src/civil_time_detail.cc" + "${LIBRARY_DIR}/src/time_zone_fixed.cc" + "${LIBRARY_DIR}/src/time_zone_format.cc" + "${LIBRARY_DIR}/src/time_zone_if.cc" + "${LIBRARY_DIR}/src/time_zone_impl.cc" + "${LIBRARY_DIR}/src/time_zone_info.cc" + "${LIBRARY_DIR}/src/time_zone_libc.cc" + "${LIBRARY_DIR}/src/time_zone_lookup.cc" + "${LIBRARY_DIR}/src/time_zone_posix.cc" + "${LIBRARY_DIR}/src/zone_info_source.cc" +) - if (LIBRARY_CCTZ AND INCLUDE_CCTZ) - set (EXTERNAL_CCTZ_LIBRARY_FOUND 1) +add_library (_cctz ${SRCS}) +target_include_directories (_cctz PUBLIC "${LIBRARY_DIR}/include") - set(CMAKE_REQUIRED_LIBRARIES ${LIBRARY_CCTZ}) - set(CMAKE_REQUIRED_INCLUDES ${INCLUDE_CCTZ}) - check_cxx_source_compiles( - " - #include <cctz/civil_time.h> - int main() { - cctz::civil_day date; - } - " - EXTERNAL_CCTZ_LIBRARY_WORKS - ) - - if (NOT EXTERNAL_CCTZ_LIBRARY_WORKS) - message (${RECONFIGURE_MESSAGE_LEVEL} "External cctz is not working: 
${LIBRARY_CCTZ} ${INCLUDE_CCTZ}") - else() - add_library (cctz UNKNOWN IMPORTED) - set_property (TARGET cctz PROPERTY IMPORTED_LOCATION ${LIBRARY_CCTZ}) - set_property (TARGET cctz PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_CCTZ}) - endif() - - set(SYSTEM_STORAGE_TZ_FILE "${CMAKE_BINARY_DIR}/src/Storages/System/StorageSystemTimeZones.generated.cpp") - file(REMOVE ${SYSTEM_STORAGE_TZ_FILE}) - file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "// autogenerated by ClickHouse/contrib/cctz-cmake/CMakeLists.txt\n") - file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "const char * auto_time_zones[] {nullptr};\n" ) - - else() - set (EXTERNAL_CCTZ_LIBRARY_FOUND 0) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system cctz") - endif() -endif() - -if (NOT EXTERNAL_CCTZ_LIBRARY_FOUND OR NOT EXTERNAL_CCTZ_LIBRARY_WORKS) - include(${ClickHouse_SOURCE_DIR}/cmake/embed_binary.cmake) - set(USE_INTERNAL_CCTZ_LIBRARY 1) - set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/cctz") - - set (SRCS - "${LIBRARY_DIR}/src/civil_time_detail.cc" - "${LIBRARY_DIR}/src/time_zone_fixed.cc" - "${LIBRARY_DIR}/src/time_zone_format.cc" - "${LIBRARY_DIR}/src/time_zone_if.cc" - "${LIBRARY_DIR}/src/time_zone_impl.cc" - "${LIBRARY_DIR}/src/time_zone_info.cc" - "${LIBRARY_DIR}/src/time_zone_libc.cc" - "${LIBRARY_DIR}/src/time_zone_lookup.cc" - "${LIBRARY_DIR}/src/time_zone_posix.cc" - "${LIBRARY_DIR}/src/zone_info_source.cc" - ) - - add_library (cctz ${SRCS}) - target_include_directories (cctz SYSTEM PUBLIC "${LIBRARY_DIR}/include") - - if (OS_FREEBSD) - # yes, need linux, because bsd check inside linux in time_zone_libc.cc:24 - target_compile_definitions (cctz PRIVATE __USE_BSD linux _XOPEN_SOURCE=600) - endif () - - # Related to time_zones table: - # StorageSystemTimeZones.generated.cpp is autogenerated each time during a build - # data in this file will be used to populate the system.time_zones table, this is specific to OS_LINUX - # as the library that's built using embedded tzdata is also specific to OS_LINUX - set(SYSTEM_STORAGE_TZ_FILE "${CMAKE_BINARY_DIR}/src/Storages/System/StorageSystemTimeZones.generated.cpp") - # remove existing copies so that its generated fresh on each build. - file(REMOVE ${SYSTEM_STORAGE_TZ_FILE}) - - # get the list of timezones from tzdata shipped with cctz - set(TZDIR "${LIBRARY_DIR}/testdata/zoneinfo") - file(STRINGS "${LIBRARY_DIR}/testdata/version" TZDATA_VERSION) - set_property(GLOBAL PROPERTY TZDATA_VERSION_PROP "${TZDATA_VERSION}") - message(STATUS "Packaging with tzdata version: ${TZDATA_VERSION}") - - set(TIMEZONE_RESOURCE_FILES) - - # each file in that dir (except of tab and localtime) store the info about timezone - execute_process(COMMAND - bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! 
-name 'localtime' | LC_ALL=C sort | paste -sd ';' -" - OUTPUT_STRIP_TRAILING_WHITESPACE - OUTPUT_VARIABLE TIMEZONES) - - file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "// autogenerated by ClickHouse/contrib/cctz-cmake/CMakeLists.txt\n") - file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "const char * auto_time_zones[] {\n" ) - - foreach(TIMEZONE ${TIMEZONES}) - file(APPEND ${SYSTEM_STORAGE_TZ_FILE} " \"${TIMEZONE}\",\n") - list(APPEND TIMEZONE_RESOURCE_FILES "${TIMEZONE}") - endforeach(TIMEZONE) - file(APPEND ${SYSTEM_STORAGE_TZ_FILE} " nullptr};\n") - clickhouse_embed_binaries( - TARGET tzdata - RESOURCE_DIR "${TZDIR}" - RESOURCES ${TIMEZONE_RESOURCE_FILES} - ) - add_dependencies(cctz tzdata) - target_link_libraries(cctz INTERFACE "-Wl,${WHOLE_ARCHIVE} $ -Wl,${NO_WHOLE_ARCHIVE}") +if (OS_FREEBSD) + # yes, need linux, because bsd check inside linux in time_zone_libc.cc:24 + target_compile_definitions (_cctz PRIVATE __USE_BSD linux _XOPEN_SOURCE=600) endif () -message (STATUS "Using cctz") +# Related to time_zones table: +# StorageSystemTimeZones.generated.cpp is autogenerated each time during a build +# data in this file will be used to populate the system.time_zones table, this is specific to OS_LINUX +# as the library that's built using embedded tzdata is also specific to OS_LINUX +set(SYSTEM_STORAGE_TZ_FILE "${CMAKE_BINARY_DIR}/src/Storages/System/StorageSystemTimeZones.generated.cpp") +# remove existing copies so that its generated fresh on each build. +file(REMOVE ${SYSTEM_STORAGE_TZ_FILE}) + +# get the list of timezones from tzdata shipped with cctz +set(TZDIR "${LIBRARY_DIR}/testdata/zoneinfo") +file(STRINGS "${LIBRARY_DIR}/testdata/version" TZDATA_VERSION) +set_property(GLOBAL PROPERTY TZDATA_VERSION_PROP "${TZDATA_VERSION}") +message(STATUS "Packaging with tzdata version: ${TZDATA_VERSION}") + +set(TIMEZONE_RESOURCE_FILES) + +# each file in that dir (except of tab and localtime) store the info about timezone +execute_process(COMMAND + bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! 
-name 'localtime' | LC_ALL=C sort | paste -sd ';' -" + OUTPUT_STRIP_TRAILING_WHITESPACE + OUTPUT_VARIABLE TIMEZONES) + +file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "// autogenerated by ClickHouse/contrib/cctz-cmake/CMakeLists.txt\n") +file(APPEND ${SYSTEM_STORAGE_TZ_FILE} "const char * auto_time_zones[] {\n" ) + +foreach(TIMEZONE ${TIMEZONES}) + file(APPEND ${SYSTEM_STORAGE_TZ_FILE} " \"${TIMEZONE}\",\n") + list(APPEND TIMEZONE_RESOURCE_FILES "${TIMEZONE}") +endforeach(TIMEZONE) +file(APPEND ${SYSTEM_STORAGE_TZ_FILE} " nullptr};\n") +clickhouse_embed_binaries( + TARGET tzdata + RESOURCE_DIR "${TZDIR}" + RESOURCES ${TIMEZONE_RESOURCE_FILES} +) +add_dependencies(_cctz tzdata) +target_link_libraries(_cctz INTERFACE "-Wl,${WHOLE_ARCHIVE} $<TARGET_FILE:tzdata> -Wl,${NO_WHOLE_ARCHIVE}") + +add_library(ch_contrib::cctz ALIAS _cctz) diff --git a/contrib/cityhash102/CMakeLists.txt b/contrib/cityhash102/CMakeLists.txt index f40a6d2408b..744fa29f3b0 100644 --- a/contrib/cityhash102/CMakeLists.txt +++ b/contrib/cityhash102/CMakeLists.txt @@ -1,8 +1,10 @@ -add_library(cityhash +add_library(_cityhash src/city.cc include/citycrc.h include/city.h src/config.h) -target_include_directories(cityhash SYSTEM BEFORE PUBLIC include) -target_include_directories(cityhash SYSTEM PRIVATE src) +target_include_directories(_cityhash SYSTEM BEFORE PUBLIC include) +target_include_directories(_cityhash SYSTEM PRIVATE src) + +add_library(ch_contrib::cityhash ALIAS _cityhash) diff --git a/contrib/cld2-cmake/CMakeLists.txt b/contrib/cld2-cmake/CMakeLists.txt index 77526a95a8f..8600856ea36 100644 --- a/contrib/cld2-cmake/CMakeLists.txt +++ b/contrib/cld2-cmake/CMakeLists.txt @@ -26,7 +26,8 @@ set (SRCS "${LIBRARY_DIR}/internal/cld2_generated_distinctoctachrome.cc" "${LIBRARY_DIR}/internal/cld_generated_score_quad_octa_2.cc" ) -add_library(cld2 ${SRCS}) -set_property(TARGET cld2 PROPERTY POSITION_INDEPENDENT_CODE ON) -target_compile_options (cld2 PRIVATE -Wno-reserved-id-macro -Wno-c++11-narrowing) -target_include_directories(cld2 SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/public") +add_library(_cld2 ${SRCS}) +set_property(TARGET _cld2 PROPERTY POSITION_INDEPENDENT_CODE ON) +target_compile_options (_cld2 PRIVATE -Wno-reserved-id-macro -Wno-c++11-narrowing) +target_include_directories(_cld2 SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/public") +add_library(ch_contrib::cld2 ALIAS _cld2) diff --git a/contrib/consistent-hashing/CMakeLists.txt b/contrib/consistent-hashing/CMakeLists.txt index 7543022df46..5d979824434 100644 --- a/contrib/consistent-hashing/CMakeLists.txt +++ b/contrib/consistent-hashing/CMakeLists.txt @@ -1,2 +1,3 @@ -add_library(consistent-hashing consistent_hashing.cpp popcount.cpp) -target_include_directories(consistent-hashing SYSTEM PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}) +add_library(_consistent_hashing consistent_hashing.cpp popcount.cpp) +target_include_directories(_consistent_hashing SYSTEM PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}) +add_library(ch_contrib::consistent_hashing ALIAS _consistent_hashing) diff --git a/contrib/cppkafka-cmake/CMakeLists.txt b/contrib/cppkafka-cmake/CMakeLists.txt index 0bc33ada529..87bf2356a80 100644 --- a/contrib/cppkafka-cmake/CMakeLists.txt +++ b/contrib/cppkafka-cmake/CMakeLists.txt @@ -1,3 +1,8 @@ +if (NOT ENABLE_KAFKA) + message(STATUS "Not using librdkafka (skip cppkafka)") + return() +endif() + set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/cppkafka") set(SRCS @@ -22,12 +27,13 @@ set(SRCS "${LIBRARY_DIR}/src/topic.cpp" ) -add_library(cppkafka ${SRCS}) +add_library(_cppkafka ${SRCS}) +add_library(ch_contrib::cppkafka 
ALIAS _cppkafka) -target_link_libraries(cppkafka +target_link_libraries(_cppkafka PRIVATE - ${RDKAFKA_LIBRARY} + ch_contrib::rdkafka boost::headers_only ) -target_include_directories(cppkafka PRIVATE "${LIBRARY_DIR}/include/cppkafka") -target_include_directories(cppkafka SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include") +target_include_directories(_cppkafka PRIVATE "${LIBRARY_DIR}/include/cppkafka") +target_include_directories(_cppkafka SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include") diff --git a/contrib/croaring-cmake/CMakeLists.txt b/contrib/croaring-cmake/CMakeLists.txt index 3d327d068c1..0bb7d0bd221 100644 --- a/contrib/croaring-cmake/CMakeLists.txt +++ b/contrib/croaring-cmake/CMakeLists.txt @@ -19,15 +19,15 @@ set(SRCS "${LIBRARY_DIR}/src/roaring_priority_queue.c" "${LIBRARY_DIR}/src/roaring_array.c") -add_library(roaring ${SRCS}) +add_library(_roaring ${SRCS}) -target_include_directories(roaring PRIVATE "${LIBRARY_DIR}/include/roaring") -target_include_directories(roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include") -target_include_directories(roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/cpp") +target_include_directories(_roaring PRIVATE "${LIBRARY_DIR}/include/roaring") +target_include_directories(_roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include") +target_include_directories(_roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/cpp") # We redirect malloc/free family of functions to different functions that will track memory in ClickHouse. # Also note that we exploit implicit function declarations. -target_compile_definitions(roaring PRIVATE +target_compile_definitions(_roaring PRIVATE -Dmalloc=clickhouse_malloc -Dcalloc=clickhouse_calloc -Drealloc=clickhouse_realloc @@ -35,4 +35,6 @@ target_compile_definitions(roaring PRIVATE -Dfree=clickhouse_free -Dposix_memalign=clickhouse_posix_memalign) -target_link_libraries(roaring PUBLIC clickhouse_common_io) +target_link_libraries(_roaring PUBLIC clickhouse_common_io) + +add_library(ch_contrib::roaring ALIAS _roaring) diff --git a/contrib/curl-cmake/CMakeLists.txt b/contrib/curl-cmake/CMakeLists.txt index 63ac8da24b1..589f40384e3 100644 --- a/contrib/curl-cmake/CMakeLists.txt +++ b/contrib/curl-cmake/CMakeLists.txt @@ -1,4 +1,7 @@ -if (NOT USE_INTERNAL_CURL) +option (ENABLE_CURL "Enable curl" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_CURL) + message(STATUS "Not using curl") return() endif() @@ -144,36 +147,24 @@ set (SRCS "${LIBRARY_DIR}/lib/vssh/libssh.c" ) -add_library (curl ${SRCS}) +add_library (_curl ${SRCS}) -target_compile_definitions (curl PRIVATE +target_compile_definitions (_curl PRIVATE HAVE_CONFIG_H BUILDING_LIBCURL CURL_HIDDEN_SYMBOLS libcurl_EXPORTS OS="${CMAKE_SYSTEM_NAME}" ) -target_include_directories (curl SYSTEM PUBLIC +target_include_directories (_curl SYSTEM PUBLIC "${LIBRARY_DIR}/include" "${LIBRARY_DIR}/lib" . # curl_config.h ) -target_link_libraries (curl PRIVATE ssl) +target_link_libraries (_curl PRIVATE OpenSSL::SSL) # The library is large - avoid bloat (XXX: is it?) 
-target_compile_options (curl PRIVATE -g0) +target_compile_options (_curl PRIVATE -g0) -# find_package(CURL) compatibility for the following packages that uses -# find_package(CURL)/include(FindCURL): -# - mariadb-connector-c -# - aws-s3-cmake -# - sentry-native -set (CURL_FOUND ON CACHE BOOL "") -set (CURL_ROOT_DIR ${LIBRARY_DIR} CACHE PATH "") -set (CURL_INCLUDE_DIR "${LIBRARY_DIR}/include" CACHE PATH "") -set (CURL_INCLUDE_DIRS "${LIBRARY_DIR}/include" CACHE PATH "") -set (CURL_LIBRARY curl CACHE STRING "") -set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "") -set (CURL_VERSION_STRING 7.67.0 CACHE STRING "") -add_library (CURL::libcurl ALIAS ${CURL_LIBRARY}) +add_library (ch_contrib::curl ALIAS _curl) diff --git a/contrib/cyrus-sasl-cmake/CMakeLists.txt b/contrib/cyrus-sasl-cmake/CMakeLists.txt index aa25a078718..41deaae19a7 100644 --- a/contrib/cyrus-sasl-cmake/CMakeLists.txt +++ b/contrib/cyrus-sasl-cmake/CMakeLists.txt @@ -1,8 +1,20 @@ +if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5}) + set (DEFAULT_ENABLE_CYRUS_SASL 1) +else() + set (DEFAULT_ENABLE_CYRUS_SASL 0) +endif() +option(ENABLE_CYRUS_SASL "Enable cyrus-sasl" ${DEFAULT_ENABLE_CYRUS_SASL}) + +if (NOT ENABLE_CYRUS_SASL) + message(STATUS "Not using cyrus-sasl") + return() +endif() + set(CYRUS_SASL_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl") -add_library(${CYRUS_SASL_LIBRARY}) +add_library(_sasl2) -target_sources(${CYRUS_SASL_LIBRARY} PRIVATE +target_sources(_sasl2 PRIVATE "${CYRUS_SASL_SOURCE_DIR}/plugins/gssapi.c" # "${CYRUS_SASL_SOURCE_DIR}/plugins/gssapiv2_init.c" "${CYRUS_SASL_SOURCE_DIR}/common/plugin_common.c" @@ -20,11 +32,11 @@ target_sources(${CYRUS_SASL_LIBRARY} PRIVATE "${CYRUS_SASL_SOURCE_DIR}/lib/checkpw.c" ) -target_include_directories(${CYRUS_SASL_LIBRARY} PUBLIC +target_include_directories(_sasl2 PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ) -target_include_directories(${CYRUS_SASL_LIBRARY} PRIVATE +target_include_directories(_sasl2 PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} # for config.h "${CYRUS_SASL_SOURCE_DIR}/plugins" ${CYRUS_SASL_SOURCE_DIR} @@ -38,7 +50,7 @@ target_include_directories(${CYRUS_SASL_LIBRARY} PRIVATE "${CYRUS_SASL_SOURCE_DIR}/tests" ) -target_compile_definitions(${CYRUS_SASL_LIBRARY} PUBLIC +target_compile_definitions(_sasl2 PUBLIC HAVE_CONFIG_H # PLUGINDIR="/usr/local/lib/sasl2" PLUGINDIR="" @@ -64,6 +76,6 @@ file(COPY DESTINATION ${CMAKE_CURRENT_BINARY_DIR} ) -target_link_libraries(${CYRUS_SASL_LIBRARY} - PUBLIC ${KRB5_LIBRARY} -) +target_link_libraries(_sasl2 PUBLIC ch_contrib::krb5) + +add_library(ch_contrib::sasl2 ALIAS _sasl2) diff --git a/contrib/datasketches-cpp-cmake/CMakeLists.txt b/contrib/datasketches-cpp-cmake/CMakeLists.txt new file mode 100644 index 00000000000..b12a88ad57b --- /dev/null +++ b/contrib/datasketches-cpp-cmake/CMakeLists.txt @@ -0,0 +1,14 @@ +option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_DATASKETCHES) + message(STATUS "Not using DataSketches") + return() +endif() + +set(DATASKETCHES_LIBRARY theta) +add_library(_datasketches INTERFACE) +target_include_directories(_datasketches SYSTEM BEFORE INTERFACE + "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/common/include" + "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/include") + +add_library(ch_contrib::datasketches ALIAS _datasketches) diff --git a/contrib/double-conversion-cmake/CMakeLists.txt b/contrib/double-conversion-cmake/CMakeLists.txt index c8bf1b34b8f..dc5b1719abf 100644 --- a/contrib/double-conversion-cmake/CMakeLists.txt +++ 
b/contrib/double-conversion-cmake/CMakeLists.txt @@ -1,6 +1,6 @@ SET(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/double-conversion") -add_library(double-conversion +add_library(_double-conversion "${LIBRARY_DIR}/double-conversion/bignum.cc" "${LIBRARY_DIR}/double-conversion/bignum-dtoa.cc" "${LIBRARY_DIR}/double-conversion/cached-powers.cc" @@ -10,4 +10,6 @@ add_library(double-conversion "${LIBRARY_DIR}/double-conversion/fixed-dtoa.cc" "${LIBRARY_DIR}/double-conversion/strtod.cc") -target_include_directories(double-conversion SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}") +target_include_directories(_double-conversion SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}") + +add_library(ch_contrib::double_conversion ALIAS _double-conversion) diff --git a/contrib/dragonbox-cmake/CMakeLists.txt b/contrib/dragonbox-cmake/CMakeLists.txt index 604394c6dce..6644ac3c313 100644 --- a/contrib/dragonbox-cmake/CMakeLists.txt +++ b/contrib/dragonbox-cmake/CMakeLists.txt @@ -1,5 +1,5 @@ set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/dragonbox") -add_library(dragonbox_to_chars "${LIBRARY_DIR}/source/dragonbox_to_chars.cpp") - -target_include_directories(dragonbox_to_chars SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include/") +add_library(_dragonbox_to_chars "${LIBRARY_DIR}/source/dragonbox_to_chars.cpp") +target_include_directories(_dragonbox_to_chars SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include/") +add_library(ch_contrib::dragonbox_to_chars ALIAS _dragonbox_to_chars) diff --git a/contrib/fast_float-cmake/CMakeLists.txt b/contrib/fast_float-cmake/CMakeLists.txt index cd945f79a20..4ddd11c6d37 100644 --- a/contrib/fast_float-cmake/CMakeLists.txt +++ b/contrib/fast_float-cmake/CMakeLists.txt @@ -1,2 +1,3 @@ -add_library(fast_float INTERFACE) -target_include_directories(fast_float INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/") +add_library(_fast_float INTERFACE) +target_include_directories(_fast_float SYSTEM BEFORE INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/") +add_library(ch_contrib::fast_float ALIAS _fast_float) diff --git a/contrib/fastops-cmake/CMakeLists.txt b/contrib/fastops-cmake/CMakeLists.txt index fe7293c614b..17d6a7f5fcb 100644 --- a/contrib/fastops-cmake/CMakeLists.txt +++ b/contrib/fastops-cmake/CMakeLists.txt @@ -1,3 +1,14 @@ +if(ARCH_AMD64 AND NOT OS_FREEBSD AND NOT OS_DARWIN) + option(ENABLE_FASTOPS "Enable fast vectorized mathematical functions library by Mikhail Parakhin" ${ENABLE_LIBRARIES}) +elseif(ENABLE_FASTOPS) + message (${RECONFIGURE_MESSAGE_LEVEL} "Fastops library is supported on x86_64 only, and not FreeBSD or Darwin") +endif() + +if(NOT ENABLE_FASTOPS) + message(STATUS "Not using fast vectorized mathematical functions library by Mikhail Parakhin") + return() +endif() + set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/fastops") set(SRCS "") @@ -14,6 +25,8 @@ endif() set (SRCS ${SRCS} "${LIBRARY_DIR}/fastops/plain/ops_plain.cpp" "${LIBRARY_DIR}/fastops/core/avx_id.cpp" "${LIBRARY_DIR}/fastops/fastops.cpp") -add_library(fastops ${SRCS}) +add_library(_fastops ${SRCS}) -target_include_directories(fastops SYSTEM PUBLIC "${LIBRARY_DIR}") +target_include_directories(_fastops SYSTEM PUBLIC "${LIBRARY_DIR}") + +add_library(ch_contrib::fastops ALIAS _fastops) diff --git a/contrib/fmtlib-cmake/CMakeLists.txt b/contrib/fmtlib-cmake/CMakeLists.txt index f3bf73d7dbc..d8cb721b9ba 100644 --- a/contrib/fmtlib-cmake/CMakeLists.txt +++ b/contrib/fmtlib-cmake/CMakeLists.txt @@ -16,5 +16,6 @@ set (SRCS ../fmtlib/include/fmt/ranges.h ) -add_library(fmt ${SRCS}) 
-target_include_directories(fmt SYSTEM PUBLIC ../fmtlib/include) +add_library(_fmt ${SRCS}) +target_include_directories(_fmt SYSTEM PUBLIC ../fmtlib/include) +add_library(ch_contrib::fmt ALIAS _fmt) diff --git a/contrib/googletest-cmake/CMakeLists.txt b/contrib/googletest-cmake/CMakeLists.txt index ec7ac91c471..f116eddc337 100644 --- a/contrib/googletest-cmake/CMakeLists.txt +++ b/contrib/googletest-cmake/CMakeLists.txt @@ -1,11 +1,15 @@ set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest") -add_library(gtest "${SRC_DIR}/src/gtest-all.cc") -set_target_properties(gtest PROPERTIES VERSION "1.0.0") -target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0) -target_include_directories(gtest SYSTEM PUBLIC "${SRC_DIR}/include") -target_include_directories(gtest PRIVATE "${SRC_DIR}") +add_library(_gtest "${SRC_DIR}/src/gtest-all.cc") +set_target_properties(_gtest PROPERTIES VERSION "1.0.0") +target_compile_definitions (_gtest INTERFACE GTEST_HAS_POSIX_RE=0) +target_include_directories(_gtest SYSTEM PUBLIC "${SRC_DIR}/include") +target_include_directories(_gtest PRIVATE "${SRC_DIR}") -add_library(gtest_main "${SRC_DIR}/src/gtest_main.cc") -set_target_properties(gtest_main PROPERTIES VERSION "1.0.0") -target_link_libraries(gtest_main PUBLIC gtest) +add_library(_gtest_main "${SRC_DIR}/src/gtest_main.cc") +set_target_properties(_gtest_main PROPERTIES VERSION "1.0.0") +target_link_libraries(_gtest_main PUBLIC _gtest) + +add_library(_gtest_all INTERFACE) +target_link_libraries(_gtest_all INTERFACE _gtest _gtest_main) +add_library(ch_contrib::gtest_all ALIAS _gtest_all) diff --git a/contrib/grpc-cmake/CMakeLists.txt b/contrib/grpc-cmake/CMakeLists.txt index b93968f62f9..520e04d198e 100644 --- a/contrib/grpc-cmake/CMakeLists.txt +++ b/contrib/grpc-cmake/CMakeLists.txt @@ -1,47 +1,52 @@ +# disable grpc due to conflicts of abseil (required by grpc) dynamic annotations with libtsan.a +if (SANITIZE STREQUAL "thread" AND COMPILER_GCC) + set(ENABLE_GRPC_DEFAULT OFF) +else() + set(ENABLE_GRPC_DEFAULT ${ENABLE_LIBRARIES}) +endif() +option(ENABLE_GRPC "Use gRPC" ${ENABLE_GRPC_DEFAULT}) + +if(NOT ENABLE_GRPC) + message(STATUS "Not using gRPC") + return() +endif() + set(_gRPC_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/grpc") set(_gRPC_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/grpc") # Use re2 from ClickHouse contrib, not from gRPC third_party. -if(NOT RE2_INCLUDE_DIR) - message(FATAL_ERROR " grpc: The location of the \"re2\" library is unknown") -endif() set(gRPC_RE2_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_RE2_INCLUDE_DIR "${RE2_INCLUDE_DIR}") -set(_gRPC_RE2_LIBRARIES "${RE2_LIBRARY}") +set(_gRPC_RE2_INCLUDE_DIR "") +set(_gRPC_RE2_LIBRARIES ch_contrib::re2) # Use zlib from ClickHouse contrib, not from gRPC third_party. -if(NOT ZLIB_INCLUDE_DIRS) - message(FATAL_ERROR " grpc: The location of the \"zlib\" library is unknown") -endif() set(gRPC_ZLIB_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_ZLIB_INCLUDE_DIR "${ZLIB_INCLUDE_DIRS}") -set(_gRPC_ZLIB_LIBRARIES "${ZLIB_LIBRARIES}") +set(_gRPC_ZLIB_INCLUDE_DIR "") +set(_gRPC_ZLIB_LIBRARIES ch_contrib::zlib) # Use protobuf from ClickHouse contrib, not from gRPC third_party. 
-if(NOT Protobuf_INCLUDE_DIR OR NOT Protobuf_LIBRARY) - message(FATAL_ERROR " grpc: The location of the \"protobuf\" library is unknown") -elseif (NOT Protobuf_PROTOC_EXECUTABLE) - message(FATAL_ERROR " grpc: The location of the protobuf compiler is unknown") -elseif (NOT Protobuf_PROTOC_LIBRARY) - message(FATAL_ERROR " grpc: The location of the protobuf compiler's library is unknown") -endif() set(gRPC_PROTOBUF_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_PROTOBUF_WELLKNOWN_INCLUDE_DIR "${Protobuf_INCLUDE_DIR}") -set(_gRPC_PROTOBUF_LIBRARIES "${Protobuf_LIBRARY}") +set(_gRPC_PROTOBUF_LIBRARIES ch_contrib::protobuf) set(_gRPC_PROTOBUF_PROTOC "protoc") -set(_gRPC_PROTOBUF_PROTOC_EXECUTABLE "${Protobuf_PROTOC_EXECUTABLE}") -set(_gRPC_PROTOBUF_PROTOC_LIBRARIES "${Protobuf_PROTOC_LIBRARY}") +set(_gRPC_PROTOBUF_PROTOC_EXECUTABLE $<TARGET_FILE:protoc>) +set(_gRPC_PROTOBUF_PROTOC_LIBRARIES ch_contrib::protoc) + +if(TARGET OpenSSL::SSL) + set(gRPC_USE_UNSECURE_LIBRARIES FALSE) +else() + set(gRPC_USE_UNSECURE_LIBRARIES TRUE) +endif() # Use OpenSSL from ClickHouse contrib, not from gRPC third_party. set(gRPC_SSL_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_SSL_INCLUDE_DIR ${OPENSSL_INCLUDE_DIR}) -set(_gRPC_SSL_LIBRARIES ${OPENSSL_LIBRARIES}) +set(_gRPC_SSL_INCLUDE_DIR "") +set(_gRPC_SSL_LIBRARIES OpenSSL::Crypto OpenSSL::SSL) # Use abseil-cpp from ClickHouse contrib, not from gRPC third_party. set(gRPC_ABSL_PROVIDER "clickhouse" CACHE STRING "" FORCE) # Choose to build static or shared library for c-ares. -if (MAKE_STATIC_LIBRARIES) +if (USE_STATIC_LIBRARIES) set(CARES_STATIC ON CACHE BOOL "" FORCE) set(CARES_SHARED OFF CACHE BOOL "" FORCE) else () @@ -77,3 +82,17 @@ add_subdirectory("${_gRPC_SOURCE_DIR}" "${_gRPC_BINARY_DIR}") # The contrib/grpc/CMakeLists.txt redefined the PROTOBUF_GENERATE_GRPC_CPP() function for its own purposes, # so we need to redefine it back. 
include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake") + +set(gRPC_CPP_PLUGIN $) +set(gRPC_PYTHON_PLUGIN $) + +set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include") +if(gRPC_USE_UNSECURE_LIBRARIES) + set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure) +else() + set(gRPC_LIBRARIES grpc grpc++) +endif() +add_library(_ch_contrib_grpc INTERFACE) +target_link_libraries(_ch_contrib_grpc INTERFACE ${gRPC_LIBRARIES}) +target_include_directories(_ch_contrib_grpc SYSTEM INTERFACE ${gRPC_INCLUDE_DIRS}) +add_library(ch_contrib::grpc ALIAS _ch_contrib_grpc) diff --git a/contrib/h3-cmake/CMakeLists.txt b/contrib/h3-cmake/CMakeLists.txt index f4c70dc476f..984d1b1ae7c 100644 --- a/contrib/h3-cmake/CMakeLists.txt +++ b/contrib/h3-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option (ENABLE_H3 "Enable H3" ${ENABLE_LIBRARIES}) + +if(NOT ENABLE_H3) + message(STATUS "Not using H3") + return() +endif() + set(H3_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib") set(H3_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/h3/src/h3lib") @@ -23,10 +30,12 @@ set(SRCS configure_file("${H3_SOURCE_DIR}/include/h3api.h.in" "${H3_BINARY_DIR}/include/h3api.h") -add_library(h3 ${SRCS}) -target_include_directories(h3 SYSTEM PUBLIC "${H3_SOURCE_DIR}/include") -target_include_directories(h3 SYSTEM PUBLIC "${H3_BINARY_DIR}/include") -target_compile_definitions(h3 PRIVATE H3_HAVE_VLA) +add_library(_h3 ${SRCS}) +target_include_directories(_h3 SYSTEM PUBLIC "${H3_SOURCE_DIR}/include") +target_include_directories(_h3 SYSTEM PUBLIC "${H3_BINARY_DIR}/include") +target_compile_definitions(_h3 PRIVATE H3_HAVE_VLA) if(M_LIBRARY) - target_link_libraries(h3 PRIVATE ${M_LIBRARY}) + target_link_libraries(_h3 PRIVATE ${M_LIBRARY}) endif() + +add_library(ch_contrib::h3 ALIAS _h3) diff --git a/contrib/hive-metastore-cmake/CMakeLists.txt b/contrib/hive-metastore-cmake/CMakeLists.txt index c92405fa4e8..9069d46cea7 100644 --- a/contrib/hive-metastore-cmake/CMakeLists.txt +++ b/contrib/hive-metastore-cmake/CMakeLists.txt @@ -1,9 +1,21 @@ +if (TARGET ch_contrib::hdfs) + option(ENABLE_HIVE "Enable Hive" ${ENABLE_LIBRARIES}) +elseif(ENABLE_HIVE) + message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use Hive without HDFS") +endif() + +if (NOT ENABLE_HIVE) + message("Hive disabled") + return() +endif() + set (SRCS ${ClickHouse_SOURCE_DIR}/contrib/hive-metastore/hive_metastore_constants.cpp ${ClickHouse_SOURCE_DIR}/contrib/hive-metastore/hive_metastore_types.cpp ${ClickHouse_SOURCE_DIR}/contrib/hive-metastore/ThriftHiveMetastore.cpp ) -add_library(${HIVE_METASTORE_LIBRARY} ${SRCS}) -target_link_libraries(${HIVE_METASTORE_LIBRARY} PUBLIC ${THRIFT_LIBRARY}) -target_include_directories(${HIVE_METASTORE_LIBRARY} SYSTEM PUBLIC ${HIVE_METASTORE_INCLUDE_DIR}) +add_library(_hivemetastore ${SRCS}) +add_library(ch_contrib::hivemetastore ALIAS _hivemetastore) +target_link_libraries(_hivemetastore PUBLIC ch_contrib::thrift) +target_include_directories(_hivemetastore SYSTEM BEFORE PUBLIC ${ClickHouse_SOURCE_DIR}/contrib/hive-metastore) diff --git a/contrib/hyperscan-cmake/CMakeLists.txt b/contrib/hyperscan-cmake/CMakeLists.txt index 248551d0b0c..02c823a3a42 100644 --- a/contrib/hyperscan-cmake/CMakeLists.txt +++ b/contrib/hyperscan-cmake/CMakeLists.txt @@ -6,268 +6,234 @@ elseif(ENABLE_HYPERSCAN) endif () if (NOT ENABLE_HYPERSCAN) - if (USE_INTERNAL_HYPERSCAN_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal hyperscan with ENABLE_HYPERSCAN=OFF") - endif() - - add_library (hyperscan INTERFACE) - 
target_compile_definitions (hyperscan INTERFACE USE_HYPERSCAN=0) - message (STATUS "Not using hyperscan") return() endif() -option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ON) +set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/hyperscan") -if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY) - find_library (LIBRARY_HYPERSCAN hs) - find_path (INCLUDE_HYPERSCAN NAMES hs.h HINTS /usr/include/hs) # Ubuntu puts headers in this folder +set (SRCS + "${LIBRARY_DIR}/src/alloc.c" + "${LIBRARY_DIR}/src/compiler/asserts.cpp" + "${LIBRARY_DIR}/src/compiler/compiler.cpp" + "${LIBRARY_DIR}/src/compiler/error.cpp" + "${LIBRARY_DIR}/src/crc32.c" + "${LIBRARY_DIR}/src/database.c" + "${LIBRARY_DIR}/src/fdr/engine_description.cpp" + "${LIBRARY_DIR}/src/fdr/fdr_compile_util.cpp" + "${LIBRARY_DIR}/src/fdr/fdr_compile.cpp" + "${LIBRARY_DIR}/src/fdr/fdr_confirm_compile.cpp" + "${LIBRARY_DIR}/src/fdr/fdr_engine_description.cpp" + "${LIBRARY_DIR}/src/fdr/fdr.c" + "${LIBRARY_DIR}/src/fdr/flood_compile.cpp" + "${LIBRARY_DIR}/src/fdr/teddy_compile.cpp" + "${LIBRARY_DIR}/src/fdr/teddy_engine_description.cpp" + "${LIBRARY_DIR}/src/fdr/teddy.c" + "${LIBRARY_DIR}/src/grey.cpp" + "${LIBRARY_DIR}/src/hs_valid_platform.c" + "${LIBRARY_DIR}/src/hs_version.c" + "${LIBRARY_DIR}/src/hs.cpp" + "${LIBRARY_DIR}/src/hwlm/hwlm_build.cpp" + "${LIBRARY_DIR}/src/hwlm/hwlm_literal.cpp" + "${LIBRARY_DIR}/src/hwlm/hwlm.c" + "${LIBRARY_DIR}/src/hwlm/noodle_build.cpp" + "${LIBRARY_DIR}/src/hwlm/noodle_engine.c" + "${LIBRARY_DIR}/src/nfa/accel_dfa_build_strat.cpp" + "${LIBRARY_DIR}/src/nfa/accel.c" + "${LIBRARY_DIR}/src/nfa/accelcompile.cpp" + "${LIBRARY_DIR}/src/nfa/castle.c" + "${LIBRARY_DIR}/src/nfa/castlecompile.cpp" + "${LIBRARY_DIR}/src/nfa/dfa_build_strat.cpp" + "${LIBRARY_DIR}/src/nfa/dfa_min.cpp" + "${LIBRARY_DIR}/src/nfa/gough.c" + "${LIBRARY_DIR}/src/nfa/goughcompile_accel.cpp" + "${LIBRARY_DIR}/src/nfa/goughcompile_reg.cpp" + "${LIBRARY_DIR}/src/nfa/goughcompile.cpp" + "${LIBRARY_DIR}/src/nfa/lbr.c" + "${LIBRARY_DIR}/src/nfa/limex_64.c" + "${LIBRARY_DIR}/src/nfa/limex_accel.c" + "${LIBRARY_DIR}/src/nfa/limex_compile.cpp" + "${LIBRARY_DIR}/src/nfa/limex_native.c" + "${LIBRARY_DIR}/src/nfa/limex_simd128.c" + "${LIBRARY_DIR}/src/nfa/limex_simd256.c" + "${LIBRARY_DIR}/src/nfa/limex_simd384.c" + "${LIBRARY_DIR}/src/nfa/limex_simd512.c" + "${LIBRARY_DIR}/src/nfa/mcclellan.c" + "${LIBRARY_DIR}/src/nfa/mcclellancompile_util.cpp" + "${LIBRARY_DIR}/src/nfa/mcclellancompile.cpp" + "${LIBRARY_DIR}/src/nfa/mcsheng_compile.cpp" + "${LIBRARY_DIR}/src/nfa/mcsheng_data.c" + "${LIBRARY_DIR}/src/nfa/mcsheng.c" + "${LIBRARY_DIR}/src/nfa/mpv.c" + "${LIBRARY_DIR}/src/nfa/mpvcompile.cpp" + "${LIBRARY_DIR}/src/nfa/nfa_api_dispatch.c" + "${LIBRARY_DIR}/src/nfa/nfa_build_util.cpp" + "${LIBRARY_DIR}/src/nfa/rdfa_graph.cpp" + "${LIBRARY_DIR}/src/nfa/rdfa_merge.cpp" + "${LIBRARY_DIR}/src/nfa/rdfa.cpp" + "${LIBRARY_DIR}/src/nfa/repeat.c" + "${LIBRARY_DIR}/src/nfa/repeatcompile.cpp" + "${LIBRARY_DIR}/src/nfa/sheng.c" + "${LIBRARY_DIR}/src/nfa/shengcompile.cpp" + "${LIBRARY_DIR}/src/nfa/shufti.c" + "${LIBRARY_DIR}/src/nfa/shufticompile.cpp" + "${LIBRARY_DIR}/src/nfa/tamarama.c" + "${LIBRARY_DIR}/src/nfa/tamaramacompile.cpp" + "${LIBRARY_DIR}/src/nfa/truffle.c" + "${LIBRARY_DIR}/src/nfa/trufflecompile.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_anchored_acyclic.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_anchored_dots.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_asserts.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_builder.cpp" + 
"${LIBRARY_DIR}/src/nfagraph/ng_calc_components.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_cyclic_redundancy.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_depth.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_dominators.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_edge_redundancy.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_equivalence.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_execute.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_expr_info.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_extparam.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_fixed_width.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_fuzzy.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_haig.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_holder.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_is_equal.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_lbr.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_limex_accel.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_limex.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_literal_analysis.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_literal_component.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_literal_decorated.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_mcclellan.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_misc_opt.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_netflow.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_prefilter.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_prune.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_puff.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_redundancy.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_region_redundancy.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_region.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_repeat.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_reports.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_restructuring.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_revacc.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_sep.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_small_literal_set.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_som_add_redundancy.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_som_util.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_som.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_split.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_squash.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_stop.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_uncalc_components.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_utf8.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_util.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_vacuous.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_violet.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng_width.cpp" + "${LIBRARY_DIR}/src/nfagraph/ng.cpp" + "${LIBRARY_DIR}/src/parser/AsciiComponentClass.cpp" + "${LIBRARY_DIR}/src/parser/buildstate.cpp" + "${LIBRARY_DIR}/src/parser/check_refs.cpp" + "${LIBRARY_DIR}/src/parser/Component.cpp" + "${LIBRARY_DIR}/src/parser/ComponentAlternation.cpp" + "${LIBRARY_DIR}/src/parser/ComponentAssertion.cpp" + "${LIBRARY_DIR}/src/parser/ComponentAtomicGroup.cpp" + "${LIBRARY_DIR}/src/parser/ComponentBackReference.cpp" + "${LIBRARY_DIR}/src/parser/ComponentBoundary.cpp" + "${LIBRARY_DIR}/src/parser/ComponentByte.cpp" + "${LIBRARY_DIR}/src/parser/ComponentClass.cpp" + "${LIBRARY_DIR}/src/parser/ComponentCondReference.cpp" + "${LIBRARY_DIR}/src/parser/ComponentEmpty.cpp" + "${LIBRARY_DIR}/src/parser/ComponentEUS.cpp" + "${LIBRARY_DIR}/src/parser/ComponentRepeat.cpp" + "${LIBRARY_DIR}/src/parser/ComponentSequence.cpp" + "${LIBRARY_DIR}/src/parser/ComponentVisitor.cpp" + "${LIBRARY_DIR}/src/parser/ComponentWordBoundary.cpp" + "${LIBRARY_DIR}/src/parser/ConstComponentVisitor.cpp" + "${LIBRARY_DIR}/src/parser/control_verbs.cpp" + "${LIBRARY_DIR}/src/parser/logical_combination.cpp" + "${LIBRARY_DIR}/src/parser/parse_error.cpp" + "${LIBRARY_DIR}/src/parser/parser_util.cpp" + "${LIBRARY_DIR}/src/parser/Parser.cpp" + 
"${LIBRARY_DIR}/src/parser/prefilter.cpp" + "${LIBRARY_DIR}/src/parser/shortcut_literal.cpp" + "${LIBRARY_DIR}/src/parser/ucp_table.cpp" + "${LIBRARY_DIR}/src/parser/unsupported.cpp" + "${LIBRARY_DIR}/src/parser/utf8_validate.cpp" + "${LIBRARY_DIR}/src/parser/Utf8ComponentClass.cpp" + "${LIBRARY_DIR}/src/rose/block.c" + "${LIBRARY_DIR}/src/rose/catchup.c" + "${LIBRARY_DIR}/src/rose/init.c" + "${LIBRARY_DIR}/src/rose/match.c" + "${LIBRARY_DIR}/src/rose/program_runtime.c" + "${LIBRARY_DIR}/src/rose/rose_build_add_mask.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_add.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_anchored.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_bytecode.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_castle.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_compile.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_convert.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_dedupe.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_engine_blob.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_exclusive.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_groups.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_infix.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_instructions.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_lit_accel.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_long_lit.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_lookaround.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_matchers.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_merge.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_misc.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_program.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_role_aliasing.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_scatter.cpp" + "${LIBRARY_DIR}/src/rose/rose_build_width.cpp" + "${LIBRARY_DIR}/src/rose/rose_in_util.cpp" + "${LIBRARY_DIR}/src/rose/stream.c" + "${LIBRARY_DIR}/src/runtime.c" + "${LIBRARY_DIR}/src/scratch.c" + "${LIBRARY_DIR}/src/smallwrite/smallwrite_build.cpp" + "${LIBRARY_DIR}/src/som/slot_manager.cpp" + "${LIBRARY_DIR}/src/som/som_runtime.c" + "${LIBRARY_DIR}/src/som/som_stream.c" + "${LIBRARY_DIR}/src/stream_compress.c" + "${LIBRARY_DIR}/src/util/alloc.cpp" + "${LIBRARY_DIR}/src/util/charreach.cpp" + "${LIBRARY_DIR}/src/util/clique.cpp" + "${LIBRARY_DIR}/src/util/compile_context.cpp" + "${LIBRARY_DIR}/src/util/compile_error.cpp" + "${LIBRARY_DIR}/src/util/cpuid_flags.c" + "${LIBRARY_DIR}/src/util/depth.cpp" + "${LIBRARY_DIR}/src/util/fatbit_build.cpp" + "${LIBRARY_DIR}/src/util/multibit_build.cpp" + "${LIBRARY_DIR}/src/util/multibit.c" + "${LIBRARY_DIR}/src/util/report_manager.cpp" + "${LIBRARY_DIR}/src/util/simd_utils.c" + "${LIBRARY_DIR}/src/util/state_compress.c" + "${LIBRARY_DIR}/src/util/target_info.cpp" + "${LIBRARY_DIR}/src/util/ue2string.cpp" +) - if (LIBRARY_HYPERSCAN AND INCLUDE_HYPERSCAN) - set (EXTERNAL_HYPERSCAN_LIBRARY_FOUND 1) - - add_library (hyperscan INTERFACE) - set_target_properties (hyperscan PROPERTIES INTERFACE_LINK_LIBRARIES ${LIBRARY_HYPERSCAN}) - set_target_properties (hyperscan PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_HYPERSCAN}) - set_property(TARGET hyperscan APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_HYPERSCAN=1) - else () - set (EXTERNAL_HYPERSCAN_LIBRARY_FOUND 0) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system hyperscan library") - endif () +add_library (_hyperscan ${SRCS}) +target_compile_options (_hyperscan + PRIVATE -g0 # Library has too much debug information + -mno-avx -mno-avx2 # The library is using dynamic dispatch and is confused if AVX is enabled globally + -march=corei7 -O2 -fno-strict-aliasing -fno-omit-frame-pointer -fvisibility=hidden # The 
options from original build system + -fno-sanitize=undefined # Assume the library takes care of itself +) +target_include_directories (_hyperscan + PRIVATE + common + "${LIBRARY_DIR}/include" +) +target_include_directories (_hyperscan SYSTEM PUBLIC "${LIBRARY_DIR}/src") +if (ARCH_AMD64) + target_include_directories (_hyperscan PRIVATE x86_64) endif () +target_link_libraries (_hyperscan PRIVATE boost::headers_only) -if (NOT EXTERNAL_HYPERSCAN_LIBRARY_FOUND) - set (USE_INTERNAL_HYPERSCAN_LIBRARY 1) - - set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/hyperscan") - - set (SRCS - "${LIBRARY_DIR}/src/alloc.c" - "${LIBRARY_DIR}/src/compiler/asserts.cpp" - "${LIBRARY_DIR}/src/compiler/compiler.cpp" - "${LIBRARY_DIR}/src/compiler/error.cpp" - "${LIBRARY_DIR}/src/crc32.c" - "${LIBRARY_DIR}/src/database.c" - "${LIBRARY_DIR}/src/fdr/engine_description.cpp" - "${LIBRARY_DIR}/src/fdr/fdr_compile_util.cpp" - "${LIBRARY_DIR}/src/fdr/fdr_compile.cpp" - "${LIBRARY_DIR}/src/fdr/fdr_confirm_compile.cpp" - "${LIBRARY_DIR}/src/fdr/fdr_engine_description.cpp" - "${LIBRARY_DIR}/src/fdr/fdr.c" - "${LIBRARY_DIR}/src/fdr/flood_compile.cpp" - "${LIBRARY_DIR}/src/fdr/teddy_compile.cpp" - "${LIBRARY_DIR}/src/fdr/teddy_engine_description.cpp" - "${LIBRARY_DIR}/src/fdr/teddy.c" - "${LIBRARY_DIR}/src/grey.cpp" - "${LIBRARY_DIR}/src/hs_valid_platform.c" - "${LIBRARY_DIR}/src/hs_version.c" - "${LIBRARY_DIR}/src/hs.cpp" - "${LIBRARY_DIR}/src/hwlm/hwlm_build.cpp" - "${LIBRARY_DIR}/src/hwlm/hwlm_literal.cpp" - "${LIBRARY_DIR}/src/hwlm/hwlm.c" - "${LIBRARY_DIR}/src/hwlm/noodle_build.cpp" - "${LIBRARY_DIR}/src/hwlm/noodle_engine.c" - "${LIBRARY_DIR}/src/nfa/accel_dfa_build_strat.cpp" - "${LIBRARY_DIR}/src/nfa/accel.c" - "${LIBRARY_DIR}/src/nfa/accelcompile.cpp" - "${LIBRARY_DIR}/src/nfa/castle.c" - "${LIBRARY_DIR}/src/nfa/castlecompile.cpp" - "${LIBRARY_DIR}/src/nfa/dfa_build_strat.cpp" - "${LIBRARY_DIR}/src/nfa/dfa_min.cpp" - "${LIBRARY_DIR}/src/nfa/gough.c" - "${LIBRARY_DIR}/src/nfa/goughcompile_accel.cpp" - "${LIBRARY_DIR}/src/nfa/goughcompile_reg.cpp" - "${LIBRARY_DIR}/src/nfa/goughcompile.cpp" - "${LIBRARY_DIR}/src/nfa/lbr.c" - "${LIBRARY_DIR}/src/nfa/limex_64.c" - "${LIBRARY_DIR}/src/nfa/limex_accel.c" - "${LIBRARY_DIR}/src/nfa/limex_compile.cpp" - "${LIBRARY_DIR}/src/nfa/limex_native.c" - "${LIBRARY_DIR}/src/nfa/limex_simd128.c" - "${LIBRARY_DIR}/src/nfa/limex_simd256.c" - "${LIBRARY_DIR}/src/nfa/limex_simd384.c" - "${LIBRARY_DIR}/src/nfa/limex_simd512.c" - "${LIBRARY_DIR}/src/nfa/mcclellan.c" - "${LIBRARY_DIR}/src/nfa/mcclellancompile_util.cpp" - "${LIBRARY_DIR}/src/nfa/mcclellancompile.cpp" - "${LIBRARY_DIR}/src/nfa/mcsheng_compile.cpp" - "${LIBRARY_DIR}/src/nfa/mcsheng_data.c" - "${LIBRARY_DIR}/src/nfa/mcsheng.c" - "${LIBRARY_DIR}/src/nfa/mpv.c" - "${LIBRARY_DIR}/src/nfa/mpvcompile.cpp" - "${LIBRARY_DIR}/src/nfa/nfa_api_dispatch.c" - "${LIBRARY_DIR}/src/nfa/nfa_build_util.cpp" - "${LIBRARY_DIR}/src/nfa/rdfa_graph.cpp" - "${LIBRARY_DIR}/src/nfa/rdfa_merge.cpp" - "${LIBRARY_DIR}/src/nfa/rdfa.cpp" - "${LIBRARY_DIR}/src/nfa/repeat.c" - "${LIBRARY_DIR}/src/nfa/repeatcompile.cpp" - "${LIBRARY_DIR}/src/nfa/sheng.c" - "${LIBRARY_DIR}/src/nfa/shengcompile.cpp" - "${LIBRARY_DIR}/src/nfa/shufti.c" - "${LIBRARY_DIR}/src/nfa/shufticompile.cpp" - "${LIBRARY_DIR}/src/nfa/tamarama.c" - "${LIBRARY_DIR}/src/nfa/tamaramacompile.cpp" - "${LIBRARY_DIR}/src/nfa/truffle.c" - "${LIBRARY_DIR}/src/nfa/trufflecompile.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_anchored_acyclic.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_anchored_dots.cpp" - 
"${LIBRARY_DIR}/src/nfagraph/ng_asserts.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_builder.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_calc_components.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_cyclic_redundancy.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_depth.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_dominators.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_edge_redundancy.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_equivalence.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_execute.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_expr_info.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_extparam.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_fixed_width.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_fuzzy.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_haig.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_holder.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_is_equal.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_lbr.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_limex_accel.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_limex.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_literal_analysis.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_literal_component.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_literal_decorated.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_mcclellan.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_misc_opt.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_netflow.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_prefilter.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_prune.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_puff.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_redundancy.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_region_redundancy.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_region.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_repeat.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_reports.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_restructuring.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_revacc.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_sep.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_small_literal_set.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_som_add_redundancy.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_som_util.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_som.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_split.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_squash.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_stop.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_uncalc_components.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_utf8.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_util.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_vacuous.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_violet.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng_width.cpp" - "${LIBRARY_DIR}/src/nfagraph/ng.cpp" - "${LIBRARY_DIR}/src/parser/AsciiComponentClass.cpp" - "${LIBRARY_DIR}/src/parser/buildstate.cpp" - "${LIBRARY_DIR}/src/parser/check_refs.cpp" - "${LIBRARY_DIR}/src/parser/Component.cpp" - "${LIBRARY_DIR}/src/parser/ComponentAlternation.cpp" - "${LIBRARY_DIR}/src/parser/ComponentAssertion.cpp" - "${LIBRARY_DIR}/src/parser/ComponentAtomicGroup.cpp" - "${LIBRARY_DIR}/src/parser/ComponentBackReference.cpp" - "${LIBRARY_DIR}/src/parser/ComponentBoundary.cpp" - "${LIBRARY_DIR}/src/parser/ComponentByte.cpp" - "${LIBRARY_DIR}/src/parser/ComponentClass.cpp" - "${LIBRARY_DIR}/src/parser/ComponentCondReference.cpp" - "${LIBRARY_DIR}/src/parser/ComponentEmpty.cpp" - "${LIBRARY_DIR}/src/parser/ComponentEUS.cpp" - "${LIBRARY_DIR}/src/parser/ComponentRepeat.cpp" - "${LIBRARY_DIR}/src/parser/ComponentSequence.cpp" - "${LIBRARY_DIR}/src/parser/ComponentVisitor.cpp" - "${LIBRARY_DIR}/src/parser/ComponentWordBoundary.cpp" - "${LIBRARY_DIR}/src/parser/ConstComponentVisitor.cpp" - "${LIBRARY_DIR}/src/parser/control_verbs.cpp" - "${LIBRARY_DIR}/src/parser/logical_combination.cpp" - "${LIBRARY_DIR}/src/parser/parse_error.cpp" - 
"${LIBRARY_DIR}/src/parser/parser_util.cpp" - "${LIBRARY_DIR}/src/parser/Parser.cpp" - "${LIBRARY_DIR}/src/parser/prefilter.cpp" - "${LIBRARY_DIR}/src/parser/shortcut_literal.cpp" - "${LIBRARY_DIR}/src/parser/ucp_table.cpp" - "${LIBRARY_DIR}/src/parser/unsupported.cpp" - "${LIBRARY_DIR}/src/parser/utf8_validate.cpp" - "${LIBRARY_DIR}/src/parser/Utf8ComponentClass.cpp" - "${LIBRARY_DIR}/src/rose/block.c" - "${LIBRARY_DIR}/src/rose/catchup.c" - "${LIBRARY_DIR}/src/rose/init.c" - "${LIBRARY_DIR}/src/rose/match.c" - "${LIBRARY_DIR}/src/rose/program_runtime.c" - "${LIBRARY_DIR}/src/rose/rose_build_add_mask.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_add.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_anchored.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_bytecode.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_castle.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_compile.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_convert.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_dedupe.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_engine_blob.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_exclusive.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_groups.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_infix.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_instructions.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_lit_accel.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_long_lit.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_lookaround.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_matchers.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_merge.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_misc.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_program.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_role_aliasing.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_scatter.cpp" - "${LIBRARY_DIR}/src/rose/rose_build_width.cpp" - "${LIBRARY_DIR}/src/rose/rose_in_util.cpp" - "${LIBRARY_DIR}/src/rose/stream.c" - "${LIBRARY_DIR}/src/runtime.c" - "${LIBRARY_DIR}/src/scratch.c" - "${LIBRARY_DIR}/src/smallwrite/smallwrite_build.cpp" - "${LIBRARY_DIR}/src/som/slot_manager.cpp" - "${LIBRARY_DIR}/src/som/som_runtime.c" - "${LIBRARY_DIR}/src/som/som_stream.c" - "${LIBRARY_DIR}/src/stream_compress.c" - "${LIBRARY_DIR}/src/util/alloc.cpp" - "${LIBRARY_DIR}/src/util/charreach.cpp" - "${LIBRARY_DIR}/src/util/clique.cpp" - "${LIBRARY_DIR}/src/util/compile_context.cpp" - "${LIBRARY_DIR}/src/util/compile_error.cpp" - "${LIBRARY_DIR}/src/util/cpuid_flags.c" - "${LIBRARY_DIR}/src/util/depth.cpp" - "${LIBRARY_DIR}/src/util/fatbit_build.cpp" - "${LIBRARY_DIR}/src/util/multibit_build.cpp" - "${LIBRARY_DIR}/src/util/multibit.c" - "${LIBRARY_DIR}/src/util/report_manager.cpp" - "${LIBRARY_DIR}/src/util/simd_utils.c" - "${LIBRARY_DIR}/src/util/state_compress.c" - "${LIBRARY_DIR}/src/util/target_info.cpp" - "${LIBRARY_DIR}/src/util/ue2string.cpp" - ) - - add_library (hyperscan ${SRCS}) - - target_compile_definitions (hyperscan PUBLIC USE_HYPERSCAN=1) - target_compile_options (hyperscan - PRIVATE -g0 # Library has too much debug information - -mno-avx -mno-avx2 # The library is using dynamic dispatch and is confused if AVX is enabled globally - -march=corei7 -O2 -fno-strict-aliasing -fno-omit-frame-pointer -fvisibility=hidden # The options from original build system - -fno-sanitize=undefined # Assume the library takes care of itself - ) - target_include_directories (hyperscan - PRIVATE - common - "${LIBRARY_DIR}/include" - ) - target_include_directories (hyperscan SYSTEM PUBLIC "${LIBRARY_DIR}/src") - if (ARCH_AMD64) - target_include_directories (hyperscan PRIVATE x86_64) - endif () - target_link_libraries (hyperscan PRIVATE 
boost::headers_only) - - set (USE_INTERNAL_HYPERSCAN_LIBRARY 1) -endif () - -message (STATUS "Using hyperscan") +add_library (ch_contrib::hyperscan ALIAS _hyperscan) diff --git a/contrib/icu-cmake/CMakeLists.txt b/contrib/icu-cmake/CMakeLists.txt index 26f3bb11006..ae19ef20e38 100644 --- a/contrib/icu-cmake/CMakeLists.txt +++ b/contrib/icu-cmake/CMakeLists.txt @@ -1,3 +1,14 @@ +if (OS_LINUX) + option(ENABLE_ICU "Enable ICU" ${ENABLE_LIBRARIES}) +else () + option(ENABLE_ICU "Enable ICU" 0) +endif () + +if (NOT ENABLE_ICU) + message(STATUS "Not using icu") + return() +endif() + set(ICU_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/source") set(ICUDATA_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/icudata/") @@ -447,19 +458,23 @@ set(ICUDATA_SOURCES # Note that we don't like any kind of binary plugins (because of runtime dependencies, vulnerabilities, ABI incompatibilities). add_definitions(-D_REENTRANT -DU_HAVE_ELF_H=1 -DU_HAVE_STRTOD_L=1 -DU_HAVE_XLOCALE_H=0 -DDEFAULT_ICU_PLUGINS="/dev/null") -add_library(icuuc ${ICUUC_SOURCES}) -add_library(icui18n ${ICUI18N_SOURCES}) -add_library(icudata ${ICUDATA_SOURCES}) +add_library(_icuuc ${ICUUC_SOURCES}) +add_library(_icui18n ${ICUI18N_SOURCES}) +add_library(_icudata ${ICUDATA_SOURCES}) -target_link_libraries(icuuc PRIVATE icudata) -target_link_libraries(icui18n PRIVATE icuuc) +target_link_libraries(_icuuc PRIVATE _icudata) +target_link_libraries(_icui18n PRIVATE _icuuc) -target_include_directories(icuuc SYSTEM PUBLIC "${ICU_SOURCE_DIR}/common/") -target_include_directories(icui18n SYSTEM PUBLIC "${ICU_SOURCE_DIR}/i18n/") +target_include_directories(_icuuc SYSTEM PUBLIC "${ICU_SOURCE_DIR}/common/") +target_include_directories(_icui18n SYSTEM PUBLIC "${ICU_SOURCE_DIR}/i18n/") -target_compile_definitions(icuuc PRIVATE -DU_COMMON_IMPLEMENTATION) -target_compile_definitions(icui18n PRIVATE -DU_I18N_IMPLEMENTATION) +target_compile_definitions(_icuuc PRIVATE -DU_COMMON_IMPLEMENTATION) +target_compile_definitions(_icui18n PRIVATE -DU_I18N_IMPLEMENTATION) if (COMPILER_CLANG) - target_compile_options(icudata PRIVATE -Wno-unused-command-line-argument) + target_compile_options(_icudata PRIVATE -Wno-unused-command-line-argument) endif () + +add_library(_icu INTERFACE) +target_link_libraries(_icu INTERFACE _icui18n _icuuc _icudata) +add_library(ch_contrib::icu ALIAS _icu) diff --git a/contrib/jemalloc-cmake/CMakeLists.txt b/contrib/jemalloc-cmake/CMakeLists.txt index fb11879fb21..b3845c7d56b 100644 --- a/contrib/jemalloc-cmake/CMakeLists.txt +++ b/contrib/jemalloc-cmake/CMakeLists.txt @@ -12,9 +12,6 @@ else () endif () if (NOT ENABLE_JEMALLOC) - add_library(jemalloc INTERFACE) - target_compile_definitions(jemalloc INTERFACE USE_JEMALLOC=0) - message (STATUS "Not using jemalloc") return() endif () @@ -90,9 +87,9 @@ if (OS_DARWIN) list(APPEND SRCS "${LIBRARY_DIR}/src/zone.c") endif () -add_library(jemalloc ${SRCS}) -target_include_directories(jemalloc PRIVATE "${LIBRARY_DIR}/include") -target_include_directories(jemalloc SYSTEM PUBLIC include) +add_library(_jemalloc ${SRCS}) +target_include_directories(_jemalloc PRIVATE "${LIBRARY_DIR}/include") +target_include_directories(_jemalloc SYSTEM PUBLIC include) set (JEMALLOC_INCLUDE_PREFIX) # OS_ @@ -120,26 +117,24 @@ endif () configure_file(${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h.in ${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h) -target_include_directories(jemalloc SYSTEM PRIVATE +target_include_directories(_jemalloc SYSTEM PRIVATE 
"${CMAKE_CURRENT_BINARY_DIR}/${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal") -target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_NO_PRIVATE_NAMESPACE) +target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_NO_PRIVATE_NAMESPACE) if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG") - target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_DEBUG=1) + target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_DEBUG=1) endif () -target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_PROF=1) +target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_PROF=1) if (USE_UNWIND) - target_compile_definitions (jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1) - target_link_libraries (jemalloc PRIVATE unwind) + target_compile_definitions (_jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1) + target_link_libraries (_jemalloc PRIVATE unwind) endif () -target_compile_options(jemalloc PRIVATE -Wno-redundant-decls) +target_compile_options(_jemalloc PRIVATE -Wno-redundant-decls) # for RTLD_NEXT -target_compile_options(jemalloc PRIVATE -D_GNU_SOURCE) +target_compile_options(_jemalloc PRIVATE -D_GNU_SOURCE) -set_property(TARGET jemalloc APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_JEMALLOC=1) - -message (STATUS "Using jemalloc") +add_library(ch_contrib::jemalloc ALIAS _jemalloc) diff --git a/contrib/krb5-cmake/CMakeLists.txt b/contrib/krb5-cmake/CMakeLists.txt index f7318a5bf8a..685e8737ef0 100644 --- a/contrib/krb5-cmake/CMakeLists.txt +++ b/contrib/krb5-cmake/CMakeLists.txt @@ -1,3 +1,15 @@ +set (ENABLE_KRB5_DEFAULT 1) +if (NOT CMAKE_SYSTEM_NAME MATCHES "Linux" AND NOT (CMAKE_SYSTEM_NAME MATCHES "Darwin" AND NOT CMAKE_CROSSCOMPILING)) + message (WARNING "krb5 disabled in non-Linux and non-native-Darwin environments") + set (ENABLE_KRB5_DEFAULT 0) +endif () +OPTION(ENABLE_KRB5 "Enable krb5" ${ENABLE_KRB5_DEFAULT}) + +if (NOT ENABLE_KRB5) + message(STATUS "Not using krb5") + return() +endif () + find_program(AWK_PROGRAM awk) if(NOT AWK_PROGRAM) message(FATAL_ERROR "You need the awk program to build ClickHouse with krb5 enabled.") @@ -546,10 +558,10 @@ add_custom_target( VERBATIM ) -add_library(${KRB5_LIBRARY}) +add_library(_krb5) add_dependencies( - ${KRB5_LIBRARY} + _krb5 ERRMAP_H ERROR_MAP_H KRB_5_H @@ -567,7 +579,7 @@ if(CMAKE_SYSTEM_NAME MATCHES "Darwin") list(APPEND ALL_SRCS "${CMAKE_CURRENT_BINARY_DIR}/include_private/kcmrpc.c") endif() -target_sources(${KRB5_LIBRARY} PRIVATE +target_sources(_krb5 PRIVATE ${ALL_SRCS} ) @@ -639,12 +651,12 @@ add_custom_command( -target_include_directories(${KRB5_LIBRARY} PUBLIC +target_include_directories(_krb5 SYSTEM BEFORE PUBLIC "${KRB5_SOURCE_DIR}/include" "${CMAKE_CURRENT_BINARY_DIR}/include" ) -target_include_directories(${KRB5_LIBRARY} PRIVATE +target_include_directories(_krb5 PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/include_private" # For autoconf.h and other generated headers. 
${KRB5_SOURCE_DIR} "${KRB5_SOURCE_DIR}/include" @@ -664,10 +676,9 @@ target_include_directories(${KRB5_LIBRARY} PRIVATE "${KRB5_SOURCE_DIR}/lib/krb5/rcache" "${KRB5_SOURCE_DIR}/lib/krb5/unicode" "${KRB5_SOURCE_DIR}/lib/krb5/os" - # ${OPENSSL_INCLUDE_DIR} ) -target_compile_definitions(${KRB5_LIBRARY} PRIVATE +target_compile_definitions(_krb5 PRIVATE KRB5_PRIVATE _GSS_STATIC_LINK=1 KRB5_DEPRECATED=1 @@ -677,6 +688,6 @@ target_compile_definitions(${KRB5_LIBRARY} PRIVATE LIBDIR="/usr/local/lib" ) -target_link_libraries(${KRB5_LIBRARY} - PRIVATE ${OPENSSL_CRYPTO_LIBRARY} -) +target_link_libraries(_krb5 PRIVATE OpenSSL::Crypto OpenSSL::SSL) + +add_library(ch_contrib::krb5 ALIAS _krb5) diff --git a/contrib/lemmagen-c-cmake/CMakeLists.txt b/contrib/lemmagen-c-cmake/CMakeLists.txt index 3a067916bf6..67e1e5791ca 100644 --- a/contrib/lemmagen-c-cmake/CMakeLists.txt +++ b/contrib/lemmagen-c-cmake/CMakeLists.txt @@ -5,5 +5,6 @@ set(SRCS "${LIBRARY_DIR}/src/RdrLemmatizer.cpp" ) -add_library(lemmagen STATIC ${SRCS}) -target_include_directories(lemmagen SYSTEM PUBLIC "${LEMMAGEN_INCLUDE_DIR}") +add_library(_lemmagen STATIC ${SRCS}) +target_include_directories(_lemmagen SYSTEM PUBLIC "${LEMMAGEN_INCLUDE_DIR}") +add_library(ch_contrib::lemmagen ALIAS _lemmagen) diff --git a/contrib/libcpuid-cmake/CMakeLists.txt b/contrib/libcpuid-cmake/CMakeLists.txt index 9baebb3ba1b..1940b39b6aa 100644 --- a/contrib/libcpuid-cmake/CMakeLists.txt +++ b/contrib/libcpuid-cmake/CMakeLists.txt @@ -6,10 +6,7 @@ elseif(ENABLE_CPUID) endif() if (NOT ENABLE_CPUID) - add_library (cpuid INTERFACE) - - target_compile_definitions (cpuid INTERFACE USE_CPUID=0) - + message("Not using cpuid") return() endif() @@ -26,13 +23,12 @@ set (SRCS "${LIBRARY_DIR}/libcpuid/recog_intel.c" ) -add_library (cpuid ${SRCS}) +add_library (_cpuid ${SRCS}) -target_include_directories (cpuid SYSTEM PUBLIC "${LIBRARY_DIR}") -target_compile_definitions (cpuid PUBLIC USE_CPUID=1) -target_compile_definitions (cpuid PRIVATE VERSION="v0.4.1") +target_include_directories (_cpuid SYSTEM PUBLIC "${LIBRARY_DIR}") +target_compile_definitions (_cpuid PRIVATE VERSION="v0.4.1") if (COMPILER_CLANG) - target_compile_options (cpuid PRIVATE -Wno-reserved-id-macro) + target_compile_options (_cpuid PRIVATE -Wno-reserved-id-macro) endif () -message (STATUS "Using cpuid") +add_library(ch_contrib::cpuid ALIAS _cpuid) diff --git a/contrib/libdivide/CMakeLists.txt b/contrib/libdivide/CMakeLists.txt index 57e9f254db5..45cbc0a584b 100644 --- a/contrib/libdivide/CMakeLists.txt +++ b/contrib/libdivide/CMakeLists.txt @@ -1,2 +1,3 @@ -add_library (libdivide INTERFACE) -target_include_directories (libdivide SYSTEM BEFORE INTERFACE .) +add_library (_libdivide INTERFACE) +target_include_directories (_libdivide SYSTEM BEFORE INTERFACE .) 
+add_library (ch_contrib::libdivide ALIAS _libdivide) diff --git a/contrib/libfarmhash/CMakeLists.txt b/contrib/libfarmhash/CMakeLists.txt index 20bba58cde7..a0533a93f17 100644 --- a/contrib/libfarmhash/CMakeLists.txt +++ b/contrib/libfarmhash/CMakeLists.txt @@ -1,9 +1,11 @@ -add_library(farmhash - farmhash.cc - farmhash.h) +add_library(_farmhash + farmhash.cc + farmhash.h) if (MSVC) - target_compile_definitions (farmhash PRIVATE FARMHASH_NO_BUILTIN_EXPECT=1) + target_compile_definitions (_farmhash PRIVATE FARMHASH_NO_BUILTIN_EXPECT=1) endif () -target_include_directories (farmhash PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}) +target_include_directories (_farmhash BEFORE PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}) + +add_library(ch_contrib::farmhash ALIAS _farmhash) diff --git a/contrib/libgsasl-cmake/CMakeLists.txt b/contrib/libgsasl-cmake/CMakeLists.txt index 102ef12b9f5..4bb4ca9dc33 100644 --- a/contrib/libgsasl-cmake/CMakeLists.txt +++ b/contrib/libgsasl-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option(ENABLE_GSASL_LIBRARY "Enable gsasl library" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_GSASL_LIBRARY) + message(STATUS "Not using gsasl library") + return() +endif() + set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl") set(SRCS @@ -84,24 +91,26 @@ set(SRCS ${SRC_DIR}/login/server.c ) -if (USE_KRB5) +if (TARGET ch_contrib::krb5) set(SRCS ${SRCS} ${SRC_DIR}/gssapi/client.c ${SRC_DIR}/gssapi/mechinfo.c ${SRC_DIR}/gssapi/server.c) endif() -add_library(gsasl ${SRCS}) +add_library(_gsasl ${SRCS}) -target_include_directories(gsasl PUBLIC ${SRC_DIR}) -target_include_directories(gsasl PUBLIC ${SRC_DIR}/gl) -target_include_directories(gsasl PUBLIC ${SRC_DIR}/src) -target_include_directories(gsasl PUBLIC ${SRC_DIR}/digest-md5) -target_include_directories(gsasl PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include") +target_include_directories(_gsasl PUBLIC ${SRC_DIR}) +target_include_directories(_gsasl PUBLIC ${SRC_DIR}/gl) +target_include_directories(_gsasl PUBLIC ${SRC_DIR}/src) +target_include_directories(_gsasl PUBLIC ${SRC_DIR}/digest-md5) +target_include_directories(_gsasl PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include") -target_compile_definitions (gsasl PRIVATE HAVE_CONFIG_H=1) +target_compile_definitions(_gsasl PRIVATE HAVE_CONFIG_H=1) -if (USE_KRB5) - target_link_libraries(gsasl PUBLIC ${KRB5_LIBRARY}) - target_compile_definitions (gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1) +if (TARGET ch_contrib::krb5) + target_link_libraries(_gsasl PUBLIC ch_contrib::krb5) + target_compile_definitions(_gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1) endif() + +add_library(ch_contrib::gsasl ALIAS _gsasl) diff --git a/contrib/libhdfs3-cmake/CMake/FindGSasl.cmake b/contrib/libhdfs3-cmake/CMake/FindGSasl.cmake deleted file mode 100644 index 19ca7c30d1e..00000000000 --- a/contrib/libhdfs3-cmake/CMake/FindGSasl.cmake +++ /dev/null @@ -1,26 +0,0 @@ -# - Try to find the GNU sasl library (gsasl) -# -# Once done this will define -# -# GSASL_FOUND - System has gnutls -# GSASL_INCLUDE_DIR - The gnutls include directory -# GSASL_LIBRARIES - The libraries needed to use gnutls -# GSASL_DEFINITIONS - Compiler switches required for using gnutls - - -IF (GSASL_INCLUDE_DIR AND GSASL_LIBRARIES) - # in cache already - SET(GSasl_FIND_QUIETLY TRUE) -ENDIF (GSASL_INCLUDE_DIR AND GSASL_LIBRARIES) - -FIND_PATH(GSASL_INCLUDE_DIR gsasl.h) - -FIND_LIBRARY(GSASL_LIBRARIES gsasl) - -INCLUDE(FindPackageHandleStandardArgs) - -# handle the QUIETLY and REQUIRED arguments and set GSASL_FOUND to TRUE if -# 
all listed variables are TRUE -FIND_PACKAGE_HANDLE_STANDARD_ARGS(GSASL DEFAULT_MSG GSASL_LIBRARIES GSASL_INCLUDE_DIR) - -MARK_AS_ADVANCED(GSASL_INCLUDE_DIR GSASL_LIBRARIES) \ No newline at end of file diff --git a/contrib/libhdfs3-cmake/CMake/FindGoogleTest.cmake b/contrib/libhdfs3-cmake/CMake/FindGoogleTest.cmake deleted file mode 100644 index fd57c1e2abd..00000000000 --- a/contrib/libhdfs3-cmake/CMake/FindGoogleTest.cmake +++ /dev/null @@ -1,65 +0,0 @@ -include(CheckCXXSourceRuns) - -find_path(GTest_INCLUDE_DIR gtest/gtest.h - NO_DEFAULT_PATH - PATHS - "${PROJECT_SOURCE_DIR}/../thirdparty/googletest/googletest/include" - "/usr/local/include" - "/usr/include") - -find_path(GMock_INCLUDE_DIR gmock/gmock.h - NO_DEFAULT_PATH - PATHS - "${PROJECT_SOURCE_DIR}/../thirdparty/googletest/googlemock/include" - "/usr/local/include" - "/usr/include") - -find_library(Gtest_LIBRARY - NAMES libgtest.a - HINTS - "${PROJECT_SOURCE_DIR}/../thirdparty/googletest/build/googlemock/gtest" - "/usr/local/lib" - "/usr/lib") - -find_library(Gmock_LIBRARY - NAMES libgmock.a - HINTS - "${PROJECT_SOURCE_DIR}/../thirdparty/googletest/build/googlemock" - "/usr/local/lib" - "/usr/lib") - -message(STATUS "Find GoogleTest include path: ${GTest_INCLUDE_DIR}") -message(STATUS "Find GoogleMock include path: ${GMock_INCLUDE_DIR}") -message(STATUS "Find Gtest library path: ${Gtest_LIBRARY}") -message(STATUS "Find Gmock library path: ${Gmock_LIBRARY}") - -set(CMAKE_REQUIRED_INCLUDES ${GTest_INCLUDE_DIR} ${GMock_INCLUDE_DIR}) -set(CMAKE_REQUIRED_LIBRARIES ${Gtest_LIBRARY} ${Gmock_LIBRARY} -lpthread) -set(CMAKE_REQUIRED_FLAGS) -check_cxx_source_runs(" -#include -#include -int main(int argc, char *argv[]) -{ - double pi = 3.14; - EXPECT_EQ(pi, 3.14); - return 0; -} -" GoogleTest_CHECK_FINE) -message(STATUS "GoogleTest check: ${GoogleTest_CHECK_FINE}") - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args( - GoogleTest - REQUIRED_VARS - GTest_INCLUDE_DIR - GMock_INCLUDE_DIR - Gtest_LIBRARY - Gmock_LIBRARY - GoogleTest_CHECK_FINE) - -set(GoogleTest_INCLUDE_DIR ${GTest_INCLUDE_DIR} ${GMock_INCLUDE_DIR}) -set(GoogleTest_LIBRARIES ${Gtest_LIBRARY} ${Gmock_LIBRARY}) -mark_as_advanced( - GoogleTest_INCLUDE_DIR - GoogleTest_LIBRARIES) diff --git a/contrib/libhdfs3-cmake/CMake/FindKERBEROS.cmake b/contrib/libhdfs3-cmake/CMake/FindKERBEROS.cmake deleted file mode 100644 index 5fc58235a3f..00000000000 --- a/contrib/libhdfs3-cmake/CMake/FindKERBEROS.cmake +++ /dev/null @@ -1,23 +0,0 @@ -# - Find kerberos -# Find the native KERBEROS includes and library -# -# KERBEROS_INCLUDE_DIRS - where to find krb5.h, etc. -# KERBEROS_LIBRARIES - List of libraries when using krb5. -# KERBEROS_FOUND - True if krb5 found. 
- -IF (KERBEROS_INCLUDE_DIRS) - # Already in cache, be silent - SET(KERBEROS_FIND_QUIETLY TRUE) -ENDIF (KERBEROS_INCLUDE_DIRS) - -FIND_PATH(KERBEROS_INCLUDE_DIRS krb5.h) - -SET(KERBEROS_NAMES krb5 k5crypto com_err) -FIND_LIBRARY(KERBEROS_LIBRARIES NAMES ${KERBEROS_NAMES}) - -# handle the QUIETLY and REQUIRED arguments and set KERBEROS_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(KERBEROS DEFAULT_MSG KERBEROS_LIBRARIES KERBEROS_INCLUDE_DIRS) - -MARK_AS_ADVANCED(KERBEROS_LIBRARIES KERBEROS_INCLUDE_DIRS) diff --git a/contrib/libhdfs3-cmake/CMake/FindSSL.cmake b/contrib/libhdfs3-cmake/CMake/FindSSL.cmake deleted file mode 100644 index bcbc5d89653..00000000000 --- a/contrib/libhdfs3-cmake/CMake/FindSSL.cmake +++ /dev/null @@ -1,26 +0,0 @@ -# - Try to find the Open ssl library (ssl) -# -# Once done this will define -# -# SSL_FOUND - System has gnutls -# SSL_INCLUDE_DIR - The gnutls include directory -# SSL_LIBRARIES - The libraries needed to use gnutls -# SSL_DEFINITIONS - Compiler switches required for using gnutls - - -IF (SSL_INCLUDE_DIR AND SSL_LIBRARIES) - # in cache already - SET(SSL_FIND_QUIETLY TRUE) -ENDIF (SSL_INCLUDE_DIR AND SSL_LIBRARIES) - -FIND_PATH(SSL_INCLUDE_DIR openssl/opensslv.h) - -FIND_LIBRARY(SSL_LIBRARIES crypto) - -INCLUDE(FindPackageHandleStandardArgs) - -# handle the QUIETLY and REQUIRED arguments and set SSL_FOUND to TRUE if -# all listed variables are TRUE -FIND_PACKAGE_HANDLE_STANDARD_ARGS(SSL DEFAULT_MSG SSL_LIBRARIES SSL_INCLUDE_DIR) - -MARK_AS_ADVANCED(SSL_INCLUDE_DIR SSL_LIBRARIES) \ No newline at end of file diff --git a/contrib/libhdfs3-cmake/CMakeLists.txt b/contrib/libhdfs3-cmake/CMakeLists.txt index fcc4a15666c..b2f785fa06f 100644 --- a/contrib/libhdfs3-cmake/CMakeLists.txt +++ b/contrib/libhdfs3-cmake/CMakeLists.txt @@ -1,4 +1,16 @@ -if (${ENABLE_KRB5}) +if(NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE) + option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES}) +elseif(ENABLE_HDFS) + message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration") +endif() + +if(NOT ENABLE_HDFS) + message(STATUS "Not using hdfs") + return() +endif() + +if (TARGET ch_contrib::krb5) + message(STATUS "Enable kerberos for HDFS") SET(WITH_KERBEROS 1) else() SET(WITH_KERBEROS 0) @@ -27,7 +39,7 @@ set(PROTO_FILES "${HDFS3_SOURCE_DIR}/proto/datatransfer.proto" ) -PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES}) +PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES} APPEND_PATH) configure_file("${HDFS3_SOURCE_DIR}/platform.h.in" "${CMAKE_CURRENT_BINARY_DIR}/platform.h") @@ -94,30 +106,26 @@ set(SRCS set_source_files_properties("${HDFS3_SOURCE_DIR}/rpc/RpcClient.cpp" PROPERTIES COMPILE_FLAGS "-DBOOST_UUID_RANDOM_PROVIDER_FORCE_POSIX=1") # target -add_library(hdfs3 ${SRCS}) +add_library(_hdfs3 ${SRCS}) -if(USE_INTERNAL_PROTOBUF_LIBRARY) - add_dependencies(hdfs3 protoc) -endif() +add_dependencies(_hdfs3 protoc) -target_include_directories(hdfs3 PRIVATE ${HDFS3_SOURCE_DIR}) -target_include_directories(hdfs3 PRIVATE ${HDFS3_COMMON_DIR}) -target_include_directories(hdfs3 PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) +target_include_directories(_hdfs3 PRIVATE ${HDFS3_SOURCE_DIR}) +target_include_directories(_hdfs3 PRIVATE ${HDFS3_COMMON_DIR}) +target_include_directories(_hdfs3 PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) -target_include_directories(hdfs3 PRIVATE ${LIBGSASL_INCLUDE_DIR}) -target_include_directories(hdfs3 PRIVATE ${LIBXML2_INCLUDE_DIR}) 
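# [editor's sketch, not part of the patch] The gsasl and hdfs3 hunks switch from
# variable-based wiring (USE_KRB5, ${KRB5_LIBRARY}, ${LIBXML2_LIBRARIES}) to checking
# for and linking the namespaced targets directly. A hedged sketch of that consumer
# pattern (ch_contrib::krb5 exists in this patch; _bar and bar.c are hypothetical):
add_library(_bar bar.c)
if (TARGET ch_contrib::krb5)
    # Optional dependency: only link and enable GSSAPI support when krb5 was built.
    target_link_libraries(_bar PRIVATE ch_contrib::krb5)
    target_compile_definitions(_bar PRIVATE USE_GSSAPI=1)
endif()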
+target_include_directories(_hdfs3 SYSTEM BEFORE INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include") -target_link_libraries(hdfs3 PRIVATE ${LIBGSASL_LIBRARY}) +target_link_libraries(_hdfs3 PRIVATE ch_contrib::gsasl) if (WITH_KERBEROS) - target_link_libraries(hdfs3 PRIVATE ${KRB5_LIBRARY}) + target_link_libraries(_hdfs3 PRIVATE ch_contrib::krb5) endif() -target_link_libraries(hdfs3 PRIVATE ${LIBXML2_LIBRARIES}) +target_link_libraries(_hdfs3 PRIVATE ch_contrib::libxml2) # inherit from parent cmake -target_include_directories(hdfs3 PRIVATE ${Protobuf_INCLUDE_DIR}) -target_link_libraries(hdfs3 PRIVATE ${Protobuf_LIBRARY} boost::headers_only) - -if(OPENSSL_INCLUDE_DIR AND OPENSSL_LIBRARIES) - target_include_directories(hdfs3 PRIVATE ${OPENSSL_INCLUDE_DIR}) - target_link_libraries(hdfs3 PRIVATE ${OPENSSL_LIBRARIES}) +target_link_libraries(_hdfs3 PRIVATE ch_contrib::protobuf boost::headers_only) +if (TARGET OpenSSL::SSL) + target_link_libraries(_hdfs3 PRIVATE OpenSSL::Crypto OpenSSL::SSL) endif() + +add_library(ch_contrib::hdfs ALIAS _hdfs3) diff --git a/contrib/libmetrohash/CMakeLists.txt b/contrib/libmetrohash/CMakeLists.txt index 4ec5a58717d..9f7984acf8b 100644 --- a/contrib/libmetrohash/CMakeLists.txt +++ b/contrib/libmetrohash/CMakeLists.txt @@ -2,5 +2,6 @@ set (SRCS src/metrohash64.cpp src/metrohash128.cpp ) -add_library(metrohash ${SRCS}) -target_include_directories(metrohash PUBLIC src) +add_library(_metrohash ${SRCS}) +target_include_directories(_metrohash PUBLIC src) +add_library(ch_contrib::metrohash ALIAS _metrohash) diff --git a/contrib/libpq-cmake/CMakeLists.txt b/contrib/libpq-cmake/CMakeLists.txt index 2d2e0c428fe..280c0381393 100644 --- a/contrib/libpq-cmake/CMakeLists.txt +++ b/contrib/libpq-cmake/CMakeLists.txt @@ -1,3 +1,7 @@ +if (NOT ENABLE_LIBPQXX) + return() +endif() + set(LIBPQ_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpq") set(SRCS @@ -53,10 +57,12 @@ set(SRCS "${LIBPQ_SOURCE_DIR}/port/explicit_bzero.c" ) -add_library(libpq ${SRCS}) +add_library(_libpq ${SRCS}) -target_include_directories (libpq SYSTEM PUBLIC ${LIBPQ_SOURCE_DIR}) -target_include_directories (libpq SYSTEM PUBLIC "${LIBPQ_SOURCE_DIR}/include") -target_include_directories (libpq SYSTEM PRIVATE "${LIBPQ_SOURCE_DIR}/configs") +target_include_directories (_libpq SYSTEM PUBLIC ${LIBPQ_SOURCE_DIR}) +target_include_directories (_libpq SYSTEM PUBLIC "${LIBPQ_SOURCE_DIR}/include") +target_include_directories (_libpq SYSTEM PRIVATE "${LIBPQ_SOURCE_DIR}/configs") -target_link_libraries (libpq PRIVATE ssl) +target_link_libraries (_libpq PRIVATE OpenSSL::SSL) + +add_library(ch_contrib::libpq ALIAS _libpq) diff --git a/contrib/libpqxx-cmake/CMakeLists.txt b/contrib/libpqxx-cmake/CMakeLists.txt index 2804a875436..a3317404f95 100644 --- a/contrib/libpqxx-cmake/CMakeLists.txt +++ b/contrib/libpqxx-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option(ENABLE_LIBPQXX "Enalbe libpqxx" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_LIBPQXX) + message(STATUS "Not using libpqxx") + return() +endif() + set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpqxx") set (SRCS @@ -63,8 +70,9 @@ set (HDRS "${LIBRARY_DIR}/include/pqxx/zview.hxx" ) -add_library(libpqxx ${SRCS} ${HDRS}) +add_library(_libpqxx ${SRCS} ${HDRS}) -target_link_libraries(libpqxx PUBLIC ${LIBPQ_LIBRARY}) -target_include_directories (libpqxx SYSTEM PRIVATE "${LIBRARY_DIR}/include") +target_link_libraries(_libpqxx PUBLIC ch_contrib::libpq) +target_include_directories (_libpqxx SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include") +add_library(ch_contrib::libpqxx ALIAS 
_libpqxx) diff --git a/contrib/libprotobuf-mutator-cmake/CMakeLists.txt b/contrib/libprotobuf-mutator-cmake/CMakeLists.txt index 978b1e732ba..a623f95c418 100644 --- a/contrib/libprotobuf-mutator-cmake/CMakeLists.txt +++ b/contrib/libprotobuf-mutator-cmake/CMakeLists.txt @@ -1,6 +1,12 @@ +option(USE_LIBPROTOBUF_MUTATOR "Enable libprotobuf-mutator" ${ENABLE_FUZZING}) + +if (NOT USE_LIBPROTOBUF_MUTATOR) + return() +endif() + set(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/libprotobuf-mutator) -add_library(protobuf-mutator +add_library(_protobuf-mutator ${LIBRARY_DIR}/src/libfuzzer/libfuzzer_macro.cc ${LIBRARY_DIR}/src/libfuzzer/libfuzzer_mutator.cc ${LIBRARY_DIR}/src/binary_format.cc @@ -8,7 +14,9 @@ add_library(protobuf-mutator ${LIBRARY_DIR}/src/text_format.cc ${LIBRARY_DIR}/src/utf8_fix.cc) -target_include_directories(protobuf-mutator BEFORE PRIVATE "${LIBRARY_DIR}") -target_include_directories(protobuf-mutator BEFORE PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src") +target_include_directories(_protobuf-mutator BEFORE INTERFACE "${LIBRARY_DIR}") +target_include_directories(_protobuf-mutator BEFORE INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src") -target_link_libraries(protobuf-mutator ${Protobuf_LIBRARY}) +target_link_libraries(_protobuf-mutator ch_contrib::protobuf) + +add_library(ch_contrib::protobuf_mutator ALIAS _protobuf-mutator) diff --git a/contrib/librdkafka-cmake/CMakeLists.txt b/contrib/librdkafka-cmake/CMakeLists.txt index 97b6a7e1ec5..d84abd06dec 100644 --- a/contrib/librdkafka-cmake/CMakeLists.txt +++ b/contrib/librdkafka-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option (ENABLE_KAFKA "Enable kafka" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_KAFKA) + message(STATUS "Not using librdkafka") + return() +endif() + set(RDKAFKA_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src") set(SRCS @@ -80,24 +87,18 @@ set(SRCS "${RDKAFKA_SOURCE_DIR}/tinycthread_extra.c" ) -if(${ENABLE_CYRUS_SASL}) +if(TARGET ch_contrib::sasl2) message (STATUS "librdkafka with SASL support") set(WITH_SASL_CYRUS 1) endif() -if(OPENSSL_FOUND) - message (STATUS "librdkafka with SSL support") - set(WITH_SSL 1) - - if(${ENABLE_CYRUS_SASL}) - set(WITH_SASL_SCRAM 1) - set(WITH_SASL_OAUTHBEARER 1) - endif() -endif() - -if(WITH_SSL) - list(APPEND SRCS "${RDKAFKA_SOURCE_DIR}/rdkafka_ssl.c") +message (STATUS "librdkafka with SSL support") +set(WITH_SSL 1) +if(WITH_SASL_CYRUS) + set(WITH_SASL_SCRAM 1) + set(WITH_SASL_OAUTHBEARER 1) endif() +list(APPEND SRCS "${RDKAFKA_SOURCE_DIR}/rdkafka_ssl.c") if(WITH_SASL_CYRUS) list(APPEND SRCS "${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_cyrus.c") # needed to support Kerberos, requires cyrus-sasl @@ -111,19 +112,23 @@ if(WITH_SASL_OAUTHBEARER) list(APPEND SRCS "${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_oauthbearer.c") endif() -add_library(rdkafka ${SRCS}) -target_compile_options(rdkafka PRIVATE -fno-sanitize=undefined) -# target_include_directories(rdkafka SYSTEM PUBLIC include) -target_include_directories(rdkafka SYSTEM PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/include") # for "librdkafka/rdkafka.h" -target_include_directories(rdkafka SYSTEM PUBLIC ${RDKAFKA_SOURCE_DIR}) # Because weird logic with "include_next" is used. -target_include_directories(rdkafka SYSTEM PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/auxdir") # for "../config.h" -target_include_directories(rdkafka SYSTEM PRIVATE "${ZSTD_INCLUDE_DIR}/common") # Because wrong path to "zstd_errors.h" is used. 
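# [editor's sketch, not part of the patch] The libpqxx, libprotobuf-mutator and
# librdkafka hunks above each gain the same opt-out guard at the top of their
# CMakeLists.txt: declare an option (usually defaulting to ENABLE_LIBRARIES, or to
# ENABLE_FUZZING for the mutator) and return early when it is off. Generic form,
# with ENABLE_FOO / "foo" as placeholder names:
option(ENABLE_FOO "Enable foo" ${ENABLE_LIBRARIES})
if (NOT ENABLE_FOO)
    message(STATUS "Not using foo")
    return()   # nothing below runs, so no _foo target and no ch_contrib::foo alias
endif()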
-target_link_libraries(rdkafka PRIVATE lz4 ${ZLIB_LIBRARIES} ${ZSTD_LIBRARY}) -if(OPENSSL_SSL_LIBRARY AND OPENSSL_CRYPTO_LIBRARY) - target_link_libraries(rdkafka PRIVATE ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}) -endif() -if(${ENABLE_CYRUS_SASL}) - target_link_libraries(rdkafka PRIVATE ${CYRUS_SASL_LIBRARY}) +add_library(_rdkafka ${SRCS}) +add_library(ch_contrib::rdkafka ALIAS _rdkafka) + +target_compile_options(_rdkafka PRIVATE -fno-sanitize=undefined) +# target_include_directories(_rdkafka SYSTEM PUBLIC include) +target_include_directories(_rdkafka SYSTEM PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/include") # for "librdkafka/rdkafka.h" +target_include_directories(_rdkafka SYSTEM PUBLIC ${RDKAFKA_SOURCE_DIR}) # Because weird logic with "include_next" is used. +target_include_directories(_rdkafka SYSTEM PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/auxdir") # for "../config.h" +target_link_libraries(_rdkafka + PRIVATE + ch_contrib::lz4 + ch_contrib::zlib + ch_contrib::zstd + OpenSSL::Crypto OpenSSL::SSL +) +if(WITH_SASL_CYRUS) + target_link_libraries(_rdkafka PRIVATE ch_contrib::sasl2) endif() file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/auxdir") diff --git a/contrib/libstemmer-c-cmake/CMakeLists.txt b/contrib/libstemmer-c-cmake/CMakeLists.txt index b5cd59e4633..6dc65c56c6c 100644 --- a/contrib/libstemmer-c-cmake/CMakeLists.txt +++ b/contrib/libstemmer-c-cmake/CMakeLists.txt @@ -27,5 +27,6 @@ FOREACH ( LINE ${_CONTENT} ) endforeach () # all the sources parsed. Now just add the lib -add_library ( stemmer STATIC ${_SOURCES} ${_HEADERS} ) -target_include_directories (stemmer SYSTEM PUBLIC "${STEMMER_INCLUDE_DIR}") +add_library(_stemmer STATIC ${_SOURCES} ${_HEADERS} ) +target_include_directories(_stemmer SYSTEM PUBLIC "${STEMMER_INCLUDE_DIR}") +add_library(ch_contrib::stemmer ALIAS _stemmer) diff --git a/contrib/libuv-cmake/CMakeLists.txt b/contrib/libuv-cmake/CMakeLists.txt index dc47b0bf496..45f6d8e2083 100644 --- a/contrib/libuv-cmake/CMakeLists.txt +++ b/contrib/libuv-cmake/CMakeLists.txt @@ -1,3 +1,8 @@ +if (OS_DARWIN AND COMPILER_GCC) + message (WARNING "libuv cannot be built with GCC in macOS due to a bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93082") + return() +endif() + # This file is a modified version of contrib/libuv/CMakeLists.txt set (SOURCE_DIR "${CMAKE_SOURCE_DIR}/contrib/libuv") @@ -122,19 +127,17 @@ set(uv_sources "${uv_sources_tmp}") list(APPEND uv_defines CLICKHOUSE_GLIBC_COMPATIBILITY) -add_library(uv ${uv_sources}) -target_compile_definitions(uv - INTERFACE USING_UV_SHARED=1 - PRIVATE ${uv_defines} BUILDING_UV_SHARED=1) -target_compile_options(uv PRIVATE ${uv_cflags}) -target_include_directories(uv PUBLIC ${SOURCE_DIR}/include PRIVATE ${SOURCE_DIR}/src) -target_link_libraries(uv ${uv_libraries}) +add_library(_uv ${uv_sources}) +add_library(ch_contrib::uv ALIAS _uv) -add_library(uv_a STATIC ${uv_sources}) -target_compile_definitions(uv_a PRIVATE ${uv_defines}) -target_compile_options(uv_a PRIVATE ${uv_cflags}) -target_include_directories(uv_a PUBLIC ${SOURCE_DIR}/include PRIVATE ${SOURCE_DIR}/src) -target_link_libraries(uv_a ${uv_libraries}) +target_compile_definitions(_uv PRIVATE ${uv_defines}) +target_include_directories(_uv SYSTEM PUBLIC ${SOURCE_DIR}/include PRIVATE ${SOURCE_DIR}/src) +target_link_libraries(_uv ${uv_libraries}) +if (NOT USE_STATIC_LIBRARIES) + target_compile_definitions(_uv + INTERFACE USING_UV_SHARED=1 + PRIVATE BUILDING_UV_SHARED=1) +endif() if(UNIX) # Now for some gibbering horrors from beyond the stars... 
@@ -145,7 +148,6 @@ if(UNIX) string(REGEX MATCH [0-9]+[.][0-9]+[.][0-9]+ PACKAGE_VERSION "${configure_ac}") string(REGEX MATCH ^[0-9]+ UV_VERSION_MAJOR "${PACKAGE_VERSION}") # The version in the filename is mirroring the behaviour of autotools. - set_target_properties(uv PROPERTIES VERSION ${UV_VERSION_MAJOR}.0.0 - SOVERSION ${UV_VERSION_MAJOR}) + set_target_properties(_uv PROPERTIES VERSION ${UV_VERSION_MAJOR}.0.0 + SOVERSION ${UV_VERSION_MAJOR}) endif() - diff --git a/contrib/libxml2-cmake/CMakeLists.txt b/contrib/libxml2-cmake/CMakeLists.txt index 8fda0399ea3..e9c4641c161 100644 --- a/contrib/libxml2-cmake/CMakeLists.txt +++ b/contrib/libxml2-cmake/CMakeLists.txt @@ -50,13 +50,14 @@ set(SRCS "${LIBXML2_SOURCE_DIR}/schematron.c" "${LIBXML2_SOURCE_DIR}/xzlib.c" ) -add_library(libxml2 ${SRCS}) +add_library(_libxml2 ${SRCS}) -target_link_libraries(libxml2 PRIVATE ${ZLIB_LIBRARIES}) +target_link_libraries(_libxml2 PRIVATE ch_contrib::zlib) if(M_LIBRARY) - target_link_libraries(libxml2 PRIVATE ${M_LIBRARY}) + target_link_libraries(_libxml2 PRIVATE ${M_LIBRARY}) endif() -target_include_directories(libxml2 PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/linux_x86_64/include") -target_include_directories(libxml2 PUBLIC "${LIBXML2_SOURCE_DIR}/include") -target_include_directories(libxml2 SYSTEM BEFORE PRIVATE ${ZLIB_INCLUDE_DIR}) +target_include_directories(_libxml2 BEFORE PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/linux_x86_64/include") +target_include_directories(_libxml2 BEFORE PUBLIC "${LIBXML2_SOURCE_DIR}/include") + +add_library(ch_contrib::libxml2 ALIAS _libxml2) diff --git a/contrib/llvm-cmake/CMakeLists.txt b/contrib/llvm-cmake/CMakeLists.txt new file mode 100644 index 00000000000..d240924cac3 --- /dev/null +++ b/contrib/llvm-cmake/CMakeLists.txt @@ -0,0 +1,98 @@ +if (APPLE OR NOT ARCH_AMD64 OR SANITIZE STREQUAL "undefined") + set (ENABLE_EMBEDDED_COMPILER_DEFAULT OFF) +else() + set (ENABLE_EMBEDDED_COMPILER_DEFAULT ON) +endif() +option (ENABLE_EMBEDDED_COMPILER "Enable support for 'compile_expressions' option for query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT}) + +if (NOT ENABLE_EMBEDDED_COMPILER) + set (USE_EMBEDDED_COMPILER 0) + return() +endif() + +set (LLVM_FOUND 1) +set (LLVM_VERSION "12.0.0bundled") +set (LLVM_INCLUDE_DIRS + "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/include" + "${ClickHouse_BINARY_DIR}/contrib/llvm/llvm/include" +) +set (LLVM_LIBRARY_DIRS "${ClickHouse_BINARY_DIR}/contrib/llvm/llvm") + +# This list was generated by listing all LLVM libraries, compiling the binary and removing all libraries while it still compiles. 
+set (REQUIRED_LLVM_LIBRARIES + LLVMExecutionEngine + LLVMRuntimeDyld + LLVMX86CodeGen + LLVMX86Desc + LLVMX86Info + LLVMAsmPrinter + LLVMDebugInfoDWARF + LLVMGlobalISel + LLVMSelectionDAG + LLVMMCDisassembler + LLVMPasses + LLVMCodeGen + LLVMipo + LLVMBitWriter + LLVMInstrumentation + LLVMScalarOpts + LLVMAggressiveInstCombine + LLVMInstCombine + LLVMVectorize + LLVMTransformUtils + LLVMTarget + LLVMAnalysis + LLVMProfileData + LLVMObject + LLVMBitReader + LLVMCore + LLVMRemarks + LLVMBitstreamReader + LLVMMCParser + LLVMMC + LLVMBinaryFormat + LLVMDebugInfoCodeView + LLVMSupport + LLVMDemangle +) + +#function(llvm_libs_all REQUIRED_LLVM_LIBRARIES) +# llvm_map_components_to_libnames (result all) +# if (USE_STATIC_LIBRARIES OR NOT "LLVM" IN_LIST result) +# list (REMOVE_ITEM result "LTO" "LLVM") +# else() +# set (result "LLVM") +# endif () +# list (APPEND result ${CMAKE_DL_LIBS} ch_contrib::zlib) +# set (${REQUIRED_LLVM_LIBRARIES} ${result} PARENT_SCOPE) +#endfunction() + +message (STATUS "LLVM include Directory: ${LLVM_INCLUDE_DIRS}") +message (STATUS "LLVM library Directory: ${LLVM_LIBRARY_DIRS}") +message (STATUS "LLVM C++ compiler flags: ${LLVM_CXXFLAGS}") + +# ld: unknown option: --color-diagnostics +if (APPLE) + set (LINKER_SUPPORTS_COLOR_DIAGNOSTICS 0 CACHE INTERNAL "") +endif () + +# Do not adjust RPATH in llvm, since then it will not be able to find libcxx/libcxxabi/libunwind +set (CMAKE_INSTALL_RPATH "ON") +set (LLVM_COMPILER_CHECKED 1 CACHE INTERNAL "") +set (LLVM_ENABLE_EH 1 CACHE INTERNAL "") +set (LLVM_ENABLE_RTTI 1 CACHE INTERNAL "") +set (LLVM_ENABLE_PIC 0 CACHE INTERNAL "") +set (LLVM_TARGETS_TO_BUILD "X86;AArch64" CACHE STRING "") + +# Need to use C++17 since the compilation is not possible with C++20 currently, due to ambiguous operator != etc. +# LLVM project will set its default value for the -std=... but our global setting from CMake will override it. +set (CMAKE_CXX_STANDARD 17) + +set (LLVM_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm") +set (LLVM_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/llvm/llvm") +add_subdirectory ("${LLVM_SOURCE_DIR}" "${LLVM_BINARY_DIR}") + +add_library (_llvm INTERFACE) +target_link_libraries (_llvm INTERFACE ${REQUIRED_LLVM_LIBRARIES}) +target_include_directories (_llvm SYSTEM BEFORE INTERFACE ${LLVM_INCLUDE_DIRS}) +add_library(ch_contrib::llvm ALIAS _llvm) diff --git a/contrib/lz4-cmake/CMakeLists.txt b/contrib/lz4-cmake/CMakeLists.txt index 2c412d6e36a..94def029410 100644 --- a/contrib/lz4-cmake/CMakeLists.txt +++ b/contrib/lz4-cmake/CMakeLists.txt @@ -1,37 +1,18 @@ -option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ON) +# lz4 is the main compression method, cannot be disabled. 
-if (NOT USE_INTERNAL_LZ4_LIBRARY) - find_library (LIBRARY_LZ4 lz4) - find_path (INCLUDE_LZ4 lz4.h) +set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/lz4") +set (SRCS + "${LIBRARY_DIR}/lib/lz4.c" + "${LIBRARY_DIR}/lib/lz4hc.c" + "${LIBRARY_DIR}/lib/lz4frame.c" + "${LIBRARY_DIR}/lib/xxhash.c" +) - if (LIBRARY_LZ4 AND INCLUDE_LZ4) - set(EXTERNAL_LZ4_LIBRARY_FOUND 1) - add_library (lz4 INTERFACE) - set_property (TARGET lz4 PROPERTY INTERFACE_LINK_LIBRARIES ${LIBRARY_LZ4}) - set_property (TARGET lz4 PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_LZ4}) - set_property (TARGET lz4 APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_XXHASH=0) - else() - set(EXTERNAL_LZ4_LIBRARY_FOUND 0) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system lz4") - endif() -endif() +add_library (_lz4 ${SRCS}) +add_library (ch_contrib::lz4 ALIAS _lz4) -if (NOT EXTERNAL_LZ4_LIBRARY_FOUND) - set (USE_INTERNAL_LZ4_LIBRARY 1) - set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/lz4") - - set (SRCS - "${LIBRARY_DIR}/lib/lz4.c" - "${LIBRARY_DIR}/lib/lz4hc.c" - "${LIBRARY_DIR}/lib/lz4frame.c" - "${LIBRARY_DIR}/lib/xxhash.c" - ) - - add_library (lz4 ${SRCS}) - - target_compile_definitions (lz4 PUBLIC LZ4_DISABLE_DEPRECATE_WARNINGS=1 USE_XXHASH=1) - if (SANITIZE STREQUAL "undefined") - target_compile_options (lz4 PRIVATE -fno-sanitize=undefined) - endif () - target_include_directories(lz4 PUBLIC "${LIBRARY_DIR}/lib") +target_compile_definitions (_lz4 PUBLIC LZ4_DISABLE_DEPRECATE_WARNINGS=1) +if (SANITIZE STREQUAL "undefined") + target_compile_options (_lz4 PRIVATE -fno-sanitize=undefined) endif () +target_include_directories(_lz4 PUBLIC "${LIBRARY_DIR}/lib") diff --git a/contrib/magic-enum-cmake/CMakeLists.txt b/contrib/magic-enum-cmake/CMakeLists.txt index fae2c9c2d05..f1face02de1 100644 --- a/contrib/magic-enum-cmake/CMakeLists.txt +++ b/contrib/magic-enum-cmake/CMakeLists.txt @@ -1,3 +1,4 @@ -set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/magic_enum") -add_library (magic_enum INTERFACE) -target_include_directories(magic_enum SYSTEM INTERFACE ${LIBRARY_DIR}/include) +set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/magic_enum") +add_library(_magic_enum INTERFACE) +target_include_directories(_magic_enum SYSTEM INTERFACE ${LIBRARY_DIR}/include) +add_library(ch_contrib::magic_enum ALIAS _magic_enum) diff --git a/contrib/mariadb-connector-c-cmake/CMakeLists.txt b/contrib/mariadb-connector-c-cmake/CMakeLists.txt index 7c3f25cdf87..1d9f750acee 100644 --- a/contrib/mariadb-connector-c-cmake/CMakeLists.txt +++ b/contrib/mariadb-connector-c-cmake/CMakeLists.txt @@ -1,3 +1,14 @@ +if(OS_LINUX AND TARGET OpenSSL::SSL) + option(ENABLE_MYSQL "Enable MySQL" ${ENABLE_LIBRARIES}) +else () + option(ENABLE_MYSQL "Enable MySQL" FALSE) +endif () + +if(NOT ENABLE_MYSQL) + message (STATUS "Build without mysqlclient (support for MYSQL dictionary source will be disabled)") + return() +endif() + if (GLIBC_COMPATIBILITY) set(LIBM glibc-compatibility) endif() @@ -80,7 +91,7 @@ set(SIZEOF_SIZE_T 8) set(SOCKET_SIZE_TYPE socklen_t) -set(SYSTEM_LIBS ${SYSTEM_LIBS} zlib) +set(SYSTEM_LIBS ${SYSTEM_LIBS} ch_contrib::zlib) if(CMAKE_HAVE_PTHREAD_H) set(CMAKE_REQUIRED_INCLUDES pthread.h) @@ -93,8 +104,7 @@ set(HAVE_THREADS 1) set(DEFAULT_CHARSET "utf8mb4") add_definitions(-DHAVE_OPENSSL -DHAVE_TLS) -set(SSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}) -include_directories(BEFORE ${OPENSSL_INCLUDE_DIR}) +set(SSL_LIBRARIES OpenSSL::Crypto OpenSSL::SSL) set(TLS_LIBRARY_VERSION "OpenSSL ${OPENSSL_VERSION}") set(ENABLED_LOCAL_INFILE OFF) 
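# [editor's sketch, not part of the patch] The krb5, libpq, hdfs3, rdkafka and
# mariadb-connector-c hunks also drop the ${OPENSSL_SSL_LIBRARY} /
# ${OPENSSL_CRYPTO_LIBRARY} / ${OPENSSL_INCLUDE_DIR} variables in favour of the
# imported OpenSSL targets, which carry their include directories with them.
# Generic form (_baz and baz.c are hypothetical placeholders):
add_library(_baz baz.c)
if (TARGET OpenSSL::SSL)
    target_link_libraries(_baz PRIVATE OpenSSL::Crypto OpenSSL::SSL)
endif()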
@@ -222,10 +232,6 @@ if(ICONV_INCLUDE_DIR) endif() add_definitions(-DLIBICONV_PLUG) -if(ZLIB_FOUND AND WITH_EXTERNAL_ZLIB) - include_directories(${ZLIB_INCLUDE_DIR}) -endif() - if(WITH_DYNCOL) set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_dyncol.c) endif() @@ -233,10 +239,12 @@ endif() set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_async.c ${CC_SOURCE_DIR}/libmariadb/ma_context.c) -add_library(mariadbclient STATIC ${LIBMARIADB_SOURCES}) -target_link_libraries(mariadbclient ${SYSTEM_LIBS}) +add_library(_mariadbclient STATIC ${LIBMARIADB_SOURCES}) +target_link_libraries(_mariadbclient ${SYSTEM_LIBS}) -target_include_directories(mariadbclient PRIVATE ${CC_BINARY_DIR}/include-private) -target_include_directories(mariadbclient SYSTEM PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb) +target_include_directories(_mariadbclient PRIVATE ${CC_BINARY_DIR}/include-private) +target_include_directories(_mariadbclient SYSTEM PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb) -set_target_properties(mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}") +set_target_properties(_mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}") + +add_library(ch_contrib::mariadbclient ALIAS _mariadbclient) diff --git a/contrib/miniselect-cmake/CMakeLists.txt b/contrib/miniselect-cmake/CMakeLists.txt new file mode 100644 index 00000000000..f6dda7a1474 --- /dev/null +++ b/contrib/miniselect-cmake/CMakeLists.txt @@ -0,0 +1,3 @@ +add_library(_miniselect INTERFACE) +target_include_directories(_miniselect BEFORE INTERFACE ${ClickHouse_SOURCE_DIR}/contrib/miniselect/include) +add_library(ch_contrib::miniselect ALIAS _miniselect) diff --git a/contrib/msgpack-c-cmake/CMakeLists.txt b/contrib/msgpack-c-cmake/CMakeLists.txt new file mode 100644 index 00000000000..3232b0a9534 --- /dev/null +++ b/contrib/msgpack-c-cmake/CMakeLists.txt @@ -0,0 +1,10 @@ +option (ENABLE_MSGPACK "Enable msgpack library" ${ENABLE_LIBRARIES}) + +if(NOT ENABLE_MSGPACK) + message(STATUS "Not using msgpack") + return() +endif() + +add_library(_msgpack INTERFACE) +target_include_directories(_msgpack SYSTEM BEFORE INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include") +add_library(ch_contrib::msgpack ALIAS _msgpack) diff --git a/contrib/murmurhash/CMakeLists.txt b/contrib/murmurhash/CMakeLists.txt index 2d9cb3e6382..5b04974d3c5 100644 --- a/contrib/murmurhash/CMakeLists.txt +++ b/contrib/murmurhash/CMakeLists.txt @@ -1,7 +1,8 @@ -add_library(murmurhash +add_library(_murmurhash src/MurmurHash2.cpp src/MurmurHash3.cpp include/MurmurHash2.h include/MurmurHash3.h) -target_include_directories (murmurhash PUBLIC include) +target_include_directories(_murmurhash PUBLIC include) +add_library(ch_contrib::murmurhash ALIAS _murmurhash) diff --git a/contrib/nanodbc-cmake/CMakeLists.txt b/contrib/nanodbc-cmake/CMakeLists.txt index 26a030c3995..9ed6c9525b6 100644 --- a/contrib/nanodbc-cmake/CMakeLists.txt +++ b/contrib/nanodbc-cmake/CMakeLists.txt @@ -1,10 +1,10 @@ -if (NOT USE_INTERNAL_NANODBC_LIBRARY) +if (NOT ENABLE_ODBC) return () endif () set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/nanodbc") -if (NOT TARGET unixodbc) +if (NOT TARGET ch_contrib::unixodbc) message(FATAL_ERROR "Configuration error: unixodbc is not a target") endif() @@ -12,7 +12,7 @@ set (SRCS "${LIBRARY_DIR}/nanodbc/nanodbc.cpp" ) -add_library(nanodbc ${SRCS}) - -target_link_libraries (nanodbc 
PUBLIC unixodbc) -target_include_directories (nanodbc SYSTEM PUBLIC "${LIBRARY_DIR}/") +add_library(_nanodbc ${SRCS}) +target_link_libraries(_nanodbc PUBLIC ch_contrib::unixodbc) +target_include_directories(_nanodbc SYSTEM PUBLIC "${LIBRARY_DIR}/") +add_library(ch_contrib::nanodbc ALIAS _nanodbc) diff --git a/contrib/nlp-data-cmake/CMakeLists.txt b/contrib/nlp-data-cmake/CMakeLists.txt index d13258725d5..5380269c479 100644 --- a/contrib/nlp-data-cmake/CMakeLists.txt +++ b/contrib/nlp-data-cmake/CMakeLists.txt @@ -2,13 +2,14 @@ include(${ClickHouse_SOURCE_DIR}/cmake/embed_binary.cmake) set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/nlp-data") -add_library (nlp_data INTERFACE) +add_library (_nlp_data INTERFACE) clickhouse_embed_binaries( - TARGET nlp_dicts + TARGET nlp_dictionaries RESOURCE_DIR "${LIBRARY_DIR}" RESOURCES charset.zst tonality_ru.zst programming.zst ) -add_dependencies(nlp_data nlp_dicts) -target_link_libraries(nlp_data INTERFACE "-Wl,${WHOLE_ARCHIVE} $ -Wl,${NO_WHOLE_ARCHIVE}") +add_dependencies(_nlp_data nlp_dictionaries) +target_link_libraries(_nlp_data INTERFACE "-Wl,${WHOLE_ARCHIVE} $ -Wl,${NO_WHOLE_ARCHIVE}") +add_library(ch_contrib::nlp_data ALIAS _nlp_data) diff --git a/contrib/nuraft-cmake/CMakeLists.txt b/contrib/nuraft-cmake/CMakeLists.txt index d9e0aa6efc7..eaca00566d6 100644 --- a/contrib/nuraft-cmake/CMakeLists.txt +++ b/contrib/nuraft-cmake/CMakeLists.txt @@ -1,3 +1,15 @@ +set(ENABLE_NURAFT_DEFAULT ${ENABLE_LIBRARIES}) +if (OS_FREEBSD) + set(ENABLE_NURAFT_DEFAULT OFF) + message (STATUS "Using internal NuRaft library on FreeBSD and Darwin is not supported") +endif() +option(ENABLE_NURAFT "Enable NuRaft" ${ENABLE_NURAFT_DEFAULT}) + +if (NOT ENABLE_NURAFT) + message(STATUS "Not using NuRaft") + return() +endif() + set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/NuRaft") set(SRCS @@ -29,23 +41,25 @@ set(SRCS ) -add_library(nuraft ${SRCS}) +add_library(_nuraft ${SRCS}) -if (NOT OPENSSL_SSL_LIBRARY OR NOT OPENSSL_CRYPTO_LIBRARY) - target_compile_definitions(nuraft PRIVATE USE_BOOST_ASIO=1 BOOST_ASIO_STANDALONE=1 SSL_LIBRARY_NOT_FOUND=1) +if(NOT TARGET OpenSSL::Crypto) + target_compile_definitions(_nuraft PRIVATE USE_BOOST_ASIO=1 BOOST_ASIO_STANDALONE=1 SSL_LIBRARY_NOT_FOUND=1) else() - target_compile_definitions(nuraft PRIVATE USE_BOOST_ASIO=1 BOOST_ASIO_STANDALONE=1) + target_compile_definitions(_nuraft PRIVATE USE_BOOST_ASIO=1 BOOST_ASIO_STANDALONE=1) endif() -target_include_directories (nuraft SYSTEM PRIVATE "${LIBRARY_DIR}/include/libnuraft") +target_include_directories (_nuraft SYSTEM PRIVATE "${LIBRARY_DIR}/include/libnuraft") # for some reason include "asio.h" directly without "boost/" prefix. 
-target_include_directories (nuraft SYSTEM PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/boost/boost") +target_include_directories (_nuraft SYSTEM PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/boost/boost") -target_link_libraries (nuraft PRIVATE boost::headers_only boost::coroutine) +target_link_libraries (_nuraft PRIVATE boost::headers_only boost::coroutine) -if(OPENSSL_SSL_LIBRARY AND OPENSSL_CRYPTO_LIBRARY) - target_link_libraries (nuraft PRIVATE ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}) +if(TARGET OpenSSL::Crypto) + target_link_libraries (_nuraft PRIVATE OpenSSL::Crypto OpenSSL::SSL) endif() -target_include_directories (nuraft SYSTEM PUBLIC "${LIBRARY_DIR}/include") +target_include_directories (_nuraft SYSTEM PUBLIC "${LIBRARY_DIR}/include") + +add_library(ch_contrib::nuraft ALIAS _nuraft) diff --git a/contrib/openldap-cmake/CMakeLists.txt b/contrib/openldap-cmake/CMakeLists.txt index 0892403bb62..f5966474b0d 100644 --- a/contrib/openldap-cmake/CMakeLists.txt +++ b/contrib/openldap-cmake/CMakeLists.txt @@ -1,13 +1,37 @@ +option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_LDAP) + message(STATUS "Not using ldap") + return() +endif() + +string (TOLOWER "${CMAKE_SYSTEM_NAME}" _system_name) +string (TOLOWER "${CMAKE_SYSTEM_PROCESSOR}" _system_processor) +if ( + "${_system_processor}" STREQUAL "amd64" OR + "${_system_processor}" STREQUAL "x64" +) + set (_system_processor "x86_64") +elseif ("${_system_processor}" STREQUAL "arm64") + set (_system_processor "aarch64") +endif () +if (NOT( + ( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "x86_64" ) OR + ( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "aarch64" ) OR + ( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "ppc64le" ) OR + ( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "x86_64" ) OR + ( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "aarch64" ) OR + ( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "x86_64" ) OR + ( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "aarch64" ) +)) + message (${RECONFIGURE_MESSAGE_LEVEL} "LDAP support using the bundled library is not implemented for ${CMAKE_SYSTEM_NAME} ${CMAKE_SYSTEM_PROCESSOR} platform.") +endif () + set(OPENLDAP_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/openldap") # How these lists were generated? # I compiled the original OpenLDAP with it's original build system and copied the list of source files from build commands. 
-set(_libs_type SHARED) -if(OPENLDAP_USE_STATIC_LIBS) - set(_libs_type STATIC) -endif() - set(OPENLDAP_VERSION_STRING "2.5.X") macro(mkversion _lib_name) @@ -51,23 +75,22 @@ set(_lber_srcs mkversion(lber) -add_library(lber ${_libs_type} +add_library(_lber ${_lber_srcs} "${CMAKE_CURRENT_BINARY_DIR}/lber-version.c" ) -target_link_libraries(lber - PRIVATE ${OPENSSL_LIBRARIES} +target_link_libraries(_lber + PRIVATE OpenSSL::Crypto OpenSSL::SSL ) -target_include_directories(lber - PRIVATE ${_extra_build_dir}/include - PRIVATE "${OPENLDAP_SOURCE_DIR}/include" +target_include_directories(_lber SYSTEM + PUBLIC ${_extra_build_dir}/include + PUBLIC "${OPENLDAP_SOURCE_DIR}/include" PRIVATE "${OPENLDAP_SOURCE_DIR}/libraries/liblber" - PRIVATE ${OPENSSL_INCLUDE_DIR} ) -target_compile_definitions(lber +target_compile_definitions(_lber PRIVATE LBER_LIBRARY ) @@ -141,24 +164,23 @@ set(_ldap_srcs mkversion(ldap) -add_library(ldap ${_libs_type} +add_library(_ldap ${_ldap_srcs} "${CMAKE_CURRENT_BINARY_DIR}/ldap-version.c" ) -target_link_libraries(ldap - PRIVATE lber - PRIVATE ${OPENSSL_LIBRARIES} +target_link_libraries(_ldap + PRIVATE _lber + PRIVATE OpenSSL::Crypto OpenSSL::SSL ) -target_include_directories(ldap - PRIVATE ${_extra_build_dir}/include - PRIVATE "${OPENLDAP_SOURCE_DIR}/include" +target_include_directories(_ldap SYSTEM + PUBLIC ${_extra_build_dir}/include + PUBLIC "${OPENLDAP_SOURCE_DIR}/include" PRIVATE "${OPENLDAP_SOURCE_DIR}/libraries/libldap" - PRIVATE ${OPENSSL_INCLUDE_DIR} ) -target_compile_definitions(ldap +target_compile_definitions(_ldap PRIVATE LDAP_LIBRARY ) @@ -177,26 +199,28 @@ set(_ldap_r_specific_srcs mkversion(ldap_r) -add_library(ldap_r ${_libs_type} +add_library(_ldap_r ${_ldap_r_specific_srcs} ${_ldap_srcs} "${CMAKE_CURRENT_BINARY_DIR}/ldap_r-version.c" ) -target_link_libraries(ldap_r - PRIVATE lber - PRIVATE ${OPENSSL_LIBRARIES} +target_link_libraries(_ldap_r + PRIVATE _lber + PRIVATE OpenSSL::Crypto OpenSSL::SSL ) -target_include_directories(ldap_r - PRIVATE ${_extra_build_dir}/include - PRIVATE "${OPENLDAP_SOURCE_DIR}/include" +target_include_directories(_ldap_r SYSTEM + PUBLIC ${_extra_build_dir}/include + PUBLIC "${OPENLDAP_SOURCE_DIR}/include" PRIVATE "${OPENLDAP_SOURCE_DIR}/libraries/libldap_r" PRIVATE "${OPENLDAP_SOURCE_DIR}/libraries/libldap" - PRIVATE ${OPENSSL_INCLUDE_DIR} ) -target_compile_definitions(ldap_r +target_compile_definitions(_ldap_r PRIVATE LDAP_R_COMPILE PRIVATE LDAP_LIBRARY ) + +add_library(ch_contrib::ldap ALIAS _ldap_r) +add_library(ch_contrib::lber ALIAS _lber) diff --git a/contrib/pdqsort-cmake/CMakeLists.txt b/contrib/pdqsort-cmake/CMakeLists.txt new file mode 100644 index 00000000000..485f345807e --- /dev/null +++ b/contrib/pdqsort-cmake/CMakeLists.txt @@ -0,0 +1,3 @@ +add_library(_pdqsort INTERFACE) +target_include_directories(_pdqsort SYSTEM BEFORE INTERFACE ${ClickHouse_SOURCE_DIR}/contrib/pdqsort) +add_library(ch_contrib::pdqsort ALIAS _pdqsort) diff --git a/contrib/poco-cmake/Crypto/CMakeLists.txt b/contrib/poco-cmake/Crypto/CMakeLists.txt index e93ed5cf17d..9886a05b21b 100644 --- a/contrib/poco-cmake/Crypto/CMakeLists.txt +++ b/contrib/poco-cmake/Crypto/CMakeLists.txt @@ -1,46 +1,35 @@ if (ENABLE_SSL) - if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/Crypto/src/Cipher.cpp" - "${LIBRARY_DIR}/Crypto/src/CipherFactory.cpp" - "${LIBRARY_DIR}/Crypto/src/CipherImpl.cpp" - "${LIBRARY_DIR}/Crypto/src/CipherKey.cpp" - "${LIBRARY_DIR}/Crypto/src/CipherKeyImpl.cpp" - "${LIBRARY_DIR}/Crypto/src/CryptoException.cpp" - 
"${LIBRARY_DIR}/Crypto/src/CryptoStream.cpp" - "${LIBRARY_DIR}/Crypto/src/CryptoTransform.cpp" - "${LIBRARY_DIR}/Crypto/src/DigestEngine.cpp" - "${LIBRARY_DIR}/Crypto/src/ECDSADigestEngine.cpp" - "${LIBRARY_DIR}/Crypto/src/ECKey.cpp" - "${LIBRARY_DIR}/Crypto/src/ECKeyImpl.cpp" - "${LIBRARY_DIR}/Crypto/src/EVPPKey.cpp" - "${LIBRARY_DIR}/Crypto/src/KeyPair.cpp" - "${LIBRARY_DIR}/Crypto/src/KeyPairImpl.cpp" - "${LIBRARY_DIR}/Crypto/src/OpenSSLInitializer.cpp" - "${LIBRARY_DIR}/Crypto/src/PKCS12Container.cpp" - "${LIBRARY_DIR}/Crypto/src/RSACipherImpl.cpp" - "${LIBRARY_DIR}/Crypto/src/RSADigestEngine.cpp" - "${LIBRARY_DIR}/Crypto/src/RSAKey.cpp" - "${LIBRARY_DIR}/Crypto/src/RSAKeyImpl.cpp" - "${LIBRARY_DIR}/Crypto/src/X509Certificate.cpp" - ) + set (SRCS + "${LIBRARY_DIR}/Crypto/src/Cipher.cpp" + "${LIBRARY_DIR}/Crypto/src/CipherFactory.cpp" + "${LIBRARY_DIR}/Crypto/src/CipherImpl.cpp" + "${LIBRARY_DIR}/Crypto/src/CipherKey.cpp" + "${LIBRARY_DIR}/Crypto/src/CipherKeyImpl.cpp" + "${LIBRARY_DIR}/Crypto/src/CryptoException.cpp" + "${LIBRARY_DIR}/Crypto/src/CryptoStream.cpp" + "${LIBRARY_DIR}/Crypto/src/CryptoTransform.cpp" + "${LIBRARY_DIR}/Crypto/src/DigestEngine.cpp" + "${LIBRARY_DIR}/Crypto/src/ECDSADigestEngine.cpp" + "${LIBRARY_DIR}/Crypto/src/ECKey.cpp" + "${LIBRARY_DIR}/Crypto/src/ECKeyImpl.cpp" + "${LIBRARY_DIR}/Crypto/src/EVPPKey.cpp" + "${LIBRARY_DIR}/Crypto/src/KeyPair.cpp" + "${LIBRARY_DIR}/Crypto/src/KeyPairImpl.cpp" + "${LIBRARY_DIR}/Crypto/src/OpenSSLInitializer.cpp" + "${LIBRARY_DIR}/Crypto/src/PKCS12Container.cpp" + "${LIBRARY_DIR}/Crypto/src/RSACipherImpl.cpp" + "${LIBRARY_DIR}/Crypto/src/RSADigestEngine.cpp" + "${LIBRARY_DIR}/Crypto/src/RSAKey.cpp" + "${LIBRARY_DIR}/Crypto/src/RSAKeyImpl.cpp" + "${LIBRARY_DIR}/Crypto/src/X509Certificate.cpp" + ) - add_library (_poco_crypto ${SRCS}) - add_library (Poco::Crypto ALIAS _poco_crypto) + add_library (_poco_crypto ${SRCS}) + add_library (Poco::Crypto ALIAS _poco_crypto) - target_compile_options (_poco_crypto PRIVATE -Wno-newline-eof) - target_include_directories (_poco_crypto SYSTEM PUBLIC "${LIBRARY_DIR}/Crypto/include") - target_link_libraries (_poco_crypto PUBLIC Poco::Foundation ssl crypto) - else () - add_library (Poco::Crypto UNKNOWN IMPORTED GLOBAL) - - find_library(LIBRARY_POCO_CRYPTO PocoCrypto) - find_path(INCLUDE_POCO_CRYPTO Poco/Crypto/Crypto.h) - set_target_properties (Poco::Crypto PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_CRYPTO}) - set_target_properties (Poco::Crypto PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_CRYPTO}) - - target_link_libraries (Poco::Crypto INTERFACE Poco::Foundation) - endif () + target_compile_options (_poco_crypto PRIVATE -Wno-newline-eof) + target_include_directories (_poco_crypto SYSTEM PUBLIC "${LIBRARY_DIR}/Crypto/include") + target_link_libraries (_poco_crypto PUBLIC Poco::Foundation OpenSSL::SSL OpenSSL::Crypto) message (STATUS "Using Poco::Crypto") else () diff --git a/contrib/poco-cmake/Data/CMakeLists.txt b/contrib/poco-cmake/Data/CMakeLists.txt index 4fdd755b45d..b13c07583ad 100644 --- a/contrib/poco-cmake/Data/CMakeLists.txt +++ b/contrib/poco-cmake/Data/CMakeLists.txt @@ -1,60 +1,46 @@ -if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/Data/src/AbstractBinder.cpp" - "${LIBRARY_DIR}/Data/src/AbstractBinding.cpp" - "${LIBRARY_DIR}/Data/src/AbstractExtraction.cpp" - "${LIBRARY_DIR}/Data/src/AbstractExtractor.cpp" - "${LIBRARY_DIR}/Data/src/AbstractPreparation.cpp" - "${LIBRARY_DIR}/Data/src/AbstractPreparator.cpp" - "${LIBRARY_DIR}/Data/src/ArchiveStrategy.cpp" - 
"${LIBRARY_DIR}/Data/src/Bulk.cpp" - "${LIBRARY_DIR}/Data/src/Connector.cpp" - "${LIBRARY_DIR}/Data/src/DataException.cpp" - "${LIBRARY_DIR}/Data/src/Date.cpp" - "${LIBRARY_DIR}/Data/src/DynamicLOB.cpp" - "${LIBRARY_DIR}/Data/src/Limit.cpp" - "${LIBRARY_DIR}/Data/src/MetaColumn.cpp" - "${LIBRARY_DIR}/Data/src/PooledSessionHolder.cpp" - "${LIBRARY_DIR}/Data/src/PooledSessionImpl.cpp" - "${LIBRARY_DIR}/Data/src/Position.cpp" - "${LIBRARY_DIR}/Data/src/Range.cpp" - "${LIBRARY_DIR}/Data/src/RecordSet.cpp" - "${LIBRARY_DIR}/Data/src/Row.cpp" - "${LIBRARY_DIR}/Data/src/RowFilter.cpp" - "${LIBRARY_DIR}/Data/src/RowFormatter.cpp" - "${LIBRARY_DIR}/Data/src/RowIterator.cpp" - "${LIBRARY_DIR}/Data/src/Session.cpp" - "${LIBRARY_DIR}/Data/src/SessionFactory.cpp" - "${LIBRARY_DIR}/Data/src/SessionImpl.cpp" - "${LIBRARY_DIR}/Data/src/SessionPool.cpp" - "${LIBRARY_DIR}/Data/src/SessionPoolContainer.cpp" - "${LIBRARY_DIR}/Data/src/SimpleRowFormatter.cpp" - "${LIBRARY_DIR}/Data/src/SQLChannel.cpp" - "${LIBRARY_DIR}/Data/src/Statement.cpp" - "${LIBRARY_DIR}/Data/src/StatementCreator.cpp" - "${LIBRARY_DIR}/Data/src/StatementImpl.cpp" - "${LIBRARY_DIR}/Data/src/Time.cpp" - "${LIBRARY_DIR}/Data/src/Transaction.cpp" - ) +set (SRCS + "${LIBRARY_DIR}/Data/src/AbstractBinder.cpp" + "${LIBRARY_DIR}/Data/src/AbstractBinding.cpp" + "${LIBRARY_DIR}/Data/src/AbstractExtraction.cpp" + "${LIBRARY_DIR}/Data/src/AbstractExtractor.cpp" + "${LIBRARY_DIR}/Data/src/AbstractPreparation.cpp" + "${LIBRARY_DIR}/Data/src/AbstractPreparator.cpp" + "${LIBRARY_DIR}/Data/src/ArchiveStrategy.cpp" + "${LIBRARY_DIR}/Data/src/Bulk.cpp" + "${LIBRARY_DIR}/Data/src/Connector.cpp" + "${LIBRARY_DIR}/Data/src/DataException.cpp" + "${LIBRARY_DIR}/Data/src/Date.cpp" + "${LIBRARY_DIR}/Data/src/DynamicLOB.cpp" + "${LIBRARY_DIR}/Data/src/Limit.cpp" + "${LIBRARY_DIR}/Data/src/MetaColumn.cpp" + "${LIBRARY_DIR}/Data/src/PooledSessionHolder.cpp" + "${LIBRARY_DIR}/Data/src/PooledSessionImpl.cpp" + "${LIBRARY_DIR}/Data/src/Position.cpp" + "${LIBRARY_DIR}/Data/src/Range.cpp" + "${LIBRARY_DIR}/Data/src/RecordSet.cpp" + "${LIBRARY_DIR}/Data/src/Row.cpp" + "${LIBRARY_DIR}/Data/src/RowFilter.cpp" + "${LIBRARY_DIR}/Data/src/RowFormatter.cpp" + "${LIBRARY_DIR}/Data/src/RowIterator.cpp" + "${LIBRARY_DIR}/Data/src/Session.cpp" + "${LIBRARY_DIR}/Data/src/SessionFactory.cpp" + "${LIBRARY_DIR}/Data/src/SessionImpl.cpp" + "${LIBRARY_DIR}/Data/src/SessionPool.cpp" + "${LIBRARY_DIR}/Data/src/SessionPoolContainer.cpp" + "${LIBRARY_DIR}/Data/src/SimpleRowFormatter.cpp" + "${LIBRARY_DIR}/Data/src/SQLChannel.cpp" + "${LIBRARY_DIR}/Data/src/Statement.cpp" + "${LIBRARY_DIR}/Data/src/StatementCreator.cpp" + "${LIBRARY_DIR}/Data/src/StatementImpl.cpp" + "${LIBRARY_DIR}/Data/src/Time.cpp" + "${LIBRARY_DIR}/Data/src/Transaction.cpp" +) - add_library (_poco_data ${SRCS}) - add_library (Poco::Data ALIAS _poco_data) +add_library (_poco_data ${SRCS}) +add_library (Poco::Data ALIAS _poco_data) - if (COMPILER_GCC) - target_compile_options (_poco_data PRIVATE -Wno-deprecated-copy) - endif () - target_include_directories (_poco_data SYSTEM PUBLIC "${LIBRARY_DIR}/Data/include") - target_link_libraries (_poco_data PUBLIC Poco::Foundation) -else () - # NOTE: don't know why, but the GLOBAL is required here. 
- add_library (Poco::Data UNKNOWN IMPORTED GLOBAL) - - find_library(LIBRARY_POCO_DATA PocoData) - find_path(INCLUDE_POCO_DATA Poco/Data/Data.h) - set_target_properties (Poco::Data PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_DATA}) - set_target_properties (Poco::Data PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_DATA}) - - target_link_libraries (Poco::Data INTERFACE Poco::Foundation) - - message (STATUS "Using Poco::Data: ${LIBRARY_POCO_DATA} ${INCLUDE_POCO_DATA}") +if (COMPILER_GCC) + target_compile_options (_poco_data PRIVATE -Wno-deprecated-copy) endif () +target_include_directories (_poco_data SYSTEM PUBLIC "${LIBRARY_DIR}/Data/include") +target_link_libraries (_poco_data PUBLIC Poco::Foundation) diff --git a/contrib/poco-cmake/Data/ODBC/CMakeLists.txt b/contrib/poco-cmake/Data/ODBC/CMakeLists.txt index a3561304541..7de77cdacf7 100644 --- a/contrib/poco-cmake/Data/ODBC/CMakeLists.txt +++ b/contrib/poco-cmake/Data/ODBC/CMakeLists.txt @@ -1,48 +1,39 @@ if (ENABLE_ODBC) - if (NOT TARGET unixodbc) + if (NOT TARGET ch_contrib::unixodbc) message(FATAL_ERROR "Configuration error: unixodbc is not a target") endif() - if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/Data/ODBC/src/Binder.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/ConnectionHandle.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/Connector.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/EnvironmentHandle.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/Extractor.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/ODBCException.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/ODBCMetaColumn.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/ODBCStatementImpl.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/Parameter.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/Preparator.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/SessionImpl.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/TypeInfo.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/Unicode.cpp" - "${LIBRARY_DIR}/Data/ODBC/src/Utility.cpp" - ) + set (SRCS + "${LIBRARY_DIR}/Data/ODBC/src/Binder.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/ConnectionHandle.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/Connector.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/EnvironmentHandle.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/Extractor.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/ODBCException.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/ODBCMetaColumn.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/ODBCStatementImpl.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/Parameter.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/Preparator.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/SessionImpl.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/TypeInfo.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/Unicode.cpp" + "${LIBRARY_DIR}/Data/ODBC/src/Utility.cpp" + ) - add_library (_poco_data_odbc ${SRCS}) - add_library (Poco::Data::ODBC ALIAS _poco_data_odbc) + add_library (_poco_data_odbc ${SRCS}) + add_library (Poco::Data::ODBC ALIAS _poco_data_odbc) - target_compile_options (_poco_data_odbc PRIVATE -Wno-unused-variable) - target_include_directories (_poco_data_odbc SYSTEM PUBLIC "${LIBRARY_DIR}/Data/ODBC/include") - target_link_libraries (_poco_data_odbc PUBLIC Poco::Data unixodbc) - else () - add_library (Poco::Data::ODBC UNKNOWN IMPORTED GLOBAL) - - find_library(LIBRARY_POCO_DATA_ODBC PocoDataODBC) - find_path(INCLUDE_POCO_DATA_ODBC Poco/Data/ODBC/ODBC.h) - set_target_properties (Poco::Data::ODBC PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_DATA_ODBC}) - set_target_properties (Poco::Data::ODBC PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_DATA_ODBC}) - - target_link_libraries (Poco::Data::ODBC INTERFACE Poco::Data) - endif () + target_compile_options (_poco_data_odbc PRIVATE -Wno-unused-variable) + 
target_include_directories (_poco_data_odbc SYSTEM PUBLIC "${LIBRARY_DIR}/Data/ODBC/include") + target_link_libraries (_poco_data_odbc PUBLIC Poco::Data ch_contrib::unixodbc) message (STATUS "Using Poco::Data::ODBC") else () add_library (_poco_data_odbc INTERFACE) add_library (Poco::Data::ODBC ALIAS _poco_data_odbc) - target_link_libraries (_poco_data_odbc INTERFACE unixodbc) + if (TARGET ch_contrib::unixodbc) + target_link_libraries (_poco_data_odbc INTERFACE ch_contrib::unixodbc) + endif() message (STATUS "Not using Poco::Data::ODBC") endif () diff --git a/contrib/poco-cmake/Foundation/CMakeLists.txt b/contrib/poco-cmake/Foundation/CMakeLists.txt index 0c13d109344..795ec985cb4 100644 --- a/contrib/poco-cmake/Foundation/CMakeLists.txt +++ b/contrib/poco-cmake/Foundation/CMakeLists.txt @@ -1,239 +1,232 @@ -if (USE_INTERNAL_POCO_LIBRARY) - # Foundation (pcre) +# Foundation (pcre) - set (SRCS_PCRE - "${LIBRARY_DIR}/Foundation/src/pcre_config.c" - "${LIBRARY_DIR}/Foundation/src/pcre_byte_order.c" - "${LIBRARY_DIR}/Foundation/src/pcre_chartables.c" - "${LIBRARY_DIR}/Foundation/src/pcre_compile.c" - "${LIBRARY_DIR}/Foundation/src/pcre_exec.c" - "${LIBRARY_DIR}/Foundation/src/pcre_fullinfo.c" - "${LIBRARY_DIR}/Foundation/src/pcre_globals.c" - "${LIBRARY_DIR}/Foundation/src/pcre_maketables.c" - "${LIBRARY_DIR}/Foundation/src/pcre_newline.c" - "${LIBRARY_DIR}/Foundation/src/pcre_ord2utf8.c" - "${LIBRARY_DIR}/Foundation/src/pcre_study.c" - "${LIBRARY_DIR}/Foundation/src/pcre_tables.c" - "${LIBRARY_DIR}/Foundation/src/pcre_dfa_exec.c" - "${LIBRARY_DIR}/Foundation/src/pcre_get.c" - "${LIBRARY_DIR}/Foundation/src/pcre_jit_compile.c" - "${LIBRARY_DIR}/Foundation/src/pcre_refcount.c" - "${LIBRARY_DIR}/Foundation/src/pcre_string_utils.c" - "${LIBRARY_DIR}/Foundation/src/pcre_version.c" - "${LIBRARY_DIR}/Foundation/src/pcre_ucd.c" - "${LIBRARY_DIR}/Foundation/src/pcre_valid_utf8.c" - "${LIBRARY_DIR}/Foundation/src/pcre_xclass.c" - ) +set (SRCS_PCRE + "${LIBRARY_DIR}/Foundation/src/pcre_config.c" + "${LIBRARY_DIR}/Foundation/src/pcre_byte_order.c" + "${LIBRARY_DIR}/Foundation/src/pcre_chartables.c" + "${LIBRARY_DIR}/Foundation/src/pcre_compile.c" + "${LIBRARY_DIR}/Foundation/src/pcre_exec.c" + "${LIBRARY_DIR}/Foundation/src/pcre_fullinfo.c" + "${LIBRARY_DIR}/Foundation/src/pcre_globals.c" + "${LIBRARY_DIR}/Foundation/src/pcre_maketables.c" + "${LIBRARY_DIR}/Foundation/src/pcre_newline.c" + "${LIBRARY_DIR}/Foundation/src/pcre_ord2utf8.c" + "${LIBRARY_DIR}/Foundation/src/pcre_study.c" + "${LIBRARY_DIR}/Foundation/src/pcre_tables.c" + "${LIBRARY_DIR}/Foundation/src/pcre_dfa_exec.c" + "${LIBRARY_DIR}/Foundation/src/pcre_get.c" + "${LIBRARY_DIR}/Foundation/src/pcre_jit_compile.c" + "${LIBRARY_DIR}/Foundation/src/pcre_refcount.c" + "${LIBRARY_DIR}/Foundation/src/pcre_string_utils.c" + "${LIBRARY_DIR}/Foundation/src/pcre_version.c" + "${LIBRARY_DIR}/Foundation/src/pcre_ucd.c" + "${LIBRARY_DIR}/Foundation/src/pcre_valid_utf8.c" + "${LIBRARY_DIR}/Foundation/src/pcre_xclass.c" +) - add_library (_poco_foundation_pcre ${SRCS_PCRE}) - add_library (Poco::Foundation::PCRE ALIAS _poco_foundation_pcre) +add_library (_poco_foundation_pcre ${SRCS_PCRE}) +add_library (Poco::Foundation::PCRE ALIAS _poco_foundation_pcre) - target_compile_options (_poco_foundation_pcre PRIVATE -Wno-sign-compare) +target_compile_options (_poco_foundation_pcre PRIVATE -Wno-sign-compare) - # Foundation +# Foundation - set (SRCS - "${LIBRARY_DIR}/Foundation/src/AbstractObserver.cpp" - "${LIBRARY_DIR}/Foundation/src/ActiveDispatcher.cpp" - 
"${LIBRARY_DIR}/Foundation/src/ArchiveStrategy.cpp" - "${LIBRARY_DIR}/Foundation/src/Ascii.cpp" - "${LIBRARY_DIR}/Foundation/src/ASCIIEncoding.cpp" - "${LIBRARY_DIR}/Foundation/src/AsyncChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/AtomicCounter.cpp" - "${LIBRARY_DIR}/Foundation/src/Base32Decoder.cpp" - "${LIBRARY_DIR}/Foundation/src/Base32Encoder.cpp" - "${LIBRARY_DIR}/Foundation/src/Base64Decoder.cpp" - "${LIBRARY_DIR}/Foundation/src/Base64Encoder.cpp" - "${LIBRARY_DIR}/Foundation/src/BinaryReader.cpp" - "${LIBRARY_DIR}/Foundation/src/BinaryWriter.cpp" - "${LIBRARY_DIR}/Foundation/src/Bugcheck.cpp" - "${LIBRARY_DIR}/Foundation/src/ByteOrder.cpp" - "${LIBRARY_DIR}/Foundation/src/Channel.cpp" - "${LIBRARY_DIR}/Foundation/src/Checksum.cpp" - "${LIBRARY_DIR}/Foundation/src/Clock.cpp" - "${LIBRARY_DIR}/Foundation/src/CompressedLogFile.cpp" - "${LIBRARY_DIR}/Foundation/src/Condition.cpp" - "${LIBRARY_DIR}/Foundation/src/Configurable.cpp" - "${LIBRARY_DIR}/Foundation/src/ConsoleChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/CountingStream.cpp" - "${LIBRARY_DIR}/Foundation/src/DateTime.cpp" - "${LIBRARY_DIR}/Foundation/src/DateTimeFormat.cpp" - "${LIBRARY_DIR}/Foundation/src/DateTimeFormatter.cpp" - "${LIBRARY_DIR}/Foundation/src/DateTimeParser.cpp" - "${LIBRARY_DIR}/Foundation/src/Debugger.cpp" - "${LIBRARY_DIR}/Foundation/src/DeflatingStream.cpp" - "${LIBRARY_DIR}/Foundation/src/DigestEngine.cpp" - "${LIBRARY_DIR}/Foundation/src/DigestStream.cpp" - "${LIBRARY_DIR}/Foundation/src/DirectoryIterator.cpp" - "${LIBRARY_DIR}/Foundation/src/DirectoryIteratorStrategy.cpp" - "${LIBRARY_DIR}/Foundation/src/DirectoryWatcher.cpp" - "${LIBRARY_DIR}/Foundation/src/Environment.cpp" - "${LIBRARY_DIR}/Foundation/src/Error.cpp" - "${LIBRARY_DIR}/Foundation/src/ErrorHandler.cpp" - "${LIBRARY_DIR}/Foundation/src/Event.cpp" - "${LIBRARY_DIR}/Foundation/src/EventArgs.cpp" - "${LIBRARY_DIR}/Foundation/src/EventChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/Exception.cpp" - "${LIBRARY_DIR}/Foundation/src/FIFOBufferStream.cpp" - "${LIBRARY_DIR}/Foundation/src/File.cpp" - "${LIBRARY_DIR}/Foundation/src/FileChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/FileStream.cpp" - "${LIBRARY_DIR}/Foundation/src/FileStreamFactory.cpp" - "${LIBRARY_DIR}/Foundation/src/Format.cpp" - "${LIBRARY_DIR}/Foundation/src/Formatter.cpp" - "${LIBRARY_DIR}/Foundation/src/FormattingChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/FPEnvironment.cpp" - "${LIBRARY_DIR}/Foundation/src/Glob.cpp" - "${LIBRARY_DIR}/Foundation/src/Hash.cpp" - "${LIBRARY_DIR}/Foundation/src/HashStatistic.cpp" - "${LIBRARY_DIR}/Foundation/src/HexBinaryDecoder.cpp" - "${LIBRARY_DIR}/Foundation/src/HexBinaryEncoder.cpp" - "${LIBRARY_DIR}/Foundation/src/InflatingStream.cpp" - "${LIBRARY_DIR}/Foundation/src/JSONString.cpp" - "${LIBRARY_DIR}/Foundation/src/Latin1Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/Latin2Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/Latin9Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/LineEndingConverter.cpp" - "${LIBRARY_DIR}/Foundation/src/LocalDateTime.cpp" - "${LIBRARY_DIR}/Foundation/src/LogFile.cpp" - "${LIBRARY_DIR}/Foundation/src/Logger.cpp" - "${LIBRARY_DIR}/Foundation/src/LoggingFactory.cpp" - "${LIBRARY_DIR}/Foundation/src/LoggingRegistry.cpp" - "${LIBRARY_DIR}/Foundation/src/LogStream.cpp" - "${LIBRARY_DIR}/Foundation/src/Manifest.cpp" - "${LIBRARY_DIR}/Foundation/src/MD4Engine.cpp" - "${LIBRARY_DIR}/Foundation/src/MD5Engine.cpp" - "${LIBRARY_DIR}/Foundation/src/MemoryPool.cpp" - "${LIBRARY_DIR}/Foundation/src/MemoryStream.cpp" - 
"${LIBRARY_DIR}/Foundation/src/Message.cpp" - "${LIBRARY_DIR}/Foundation/src/Mutex.cpp" - "${LIBRARY_DIR}/Foundation/src/NamedEvent.cpp" - "${LIBRARY_DIR}/Foundation/src/NamedMutex.cpp" - "${LIBRARY_DIR}/Foundation/src/NestedDiagnosticContext.cpp" - "${LIBRARY_DIR}/Foundation/src/Notification.cpp" - "${LIBRARY_DIR}/Foundation/src/NotificationCenter.cpp" - "${LIBRARY_DIR}/Foundation/src/NotificationQueue.cpp" - "${LIBRARY_DIR}/Foundation/src/NullChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/NullStream.cpp" - "${LIBRARY_DIR}/Foundation/src/NumberFormatter.cpp" - "${LIBRARY_DIR}/Foundation/src/NumberParser.cpp" - "${LIBRARY_DIR}/Foundation/src/NumericString.cpp" - "${LIBRARY_DIR}/Foundation/src/Path.cpp" - "${LIBRARY_DIR}/Foundation/src/PatternFormatter.cpp" - "${LIBRARY_DIR}/Foundation/src/Pipe.cpp" - "${LIBRARY_DIR}/Foundation/src/PipeImpl.cpp" - "${LIBRARY_DIR}/Foundation/src/PipeStream.cpp" - "${LIBRARY_DIR}/Foundation/src/PriorityNotificationQueue.cpp" - "${LIBRARY_DIR}/Foundation/src/Process.cpp" - "${LIBRARY_DIR}/Foundation/src/PurgeStrategy.cpp" - "${LIBRARY_DIR}/Foundation/src/Random.cpp" - "${LIBRARY_DIR}/Foundation/src/RandomStream.cpp" - "${LIBRARY_DIR}/Foundation/src/RefCountedObject.cpp" - "${LIBRARY_DIR}/Foundation/src/RegularExpression.cpp" - "${LIBRARY_DIR}/Foundation/src/RotateStrategy.cpp" - "${LIBRARY_DIR}/Foundation/src/Runnable.cpp" - "${LIBRARY_DIR}/Foundation/src/RWLock.cpp" - "${LIBRARY_DIR}/Foundation/src/Semaphore.cpp" - "${LIBRARY_DIR}/Foundation/src/SHA1Engine.cpp" - "${LIBRARY_DIR}/Foundation/src/SharedLibrary.cpp" - "${LIBRARY_DIR}/Foundation/src/SharedMemory.cpp" - "${LIBRARY_DIR}/Foundation/src/SignalHandler.cpp" - "${LIBRARY_DIR}/Foundation/src/SimpleFileChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/SortedDirectoryIterator.cpp" - "${LIBRARY_DIR}/Foundation/src/SplitterChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/Stopwatch.cpp" - "${LIBRARY_DIR}/Foundation/src/StreamChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/StreamConverter.cpp" - "${LIBRARY_DIR}/Foundation/src/StreamCopier.cpp" - "${LIBRARY_DIR}/Foundation/src/StreamTokenizer.cpp" - "${LIBRARY_DIR}/Foundation/src/String.cpp" - "${LIBRARY_DIR}/Foundation/src/StringTokenizer.cpp" - "${LIBRARY_DIR}/Foundation/src/SynchronizedObject.cpp" - "${LIBRARY_DIR}/Foundation/src/SyslogChannel.cpp" - "${LIBRARY_DIR}/Foundation/src/Task.cpp" - "${LIBRARY_DIR}/Foundation/src/TaskManager.cpp" - "${LIBRARY_DIR}/Foundation/src/TaskNotification.cpp" - "${LIBRARY_DIR}/Foundation/src/TeeStream.cpp" - "${LIBRARY_DIR}/Foundation/src/TemporaryFile.cpp" - "${LIBRARY_DIR}/Foundation/src/TextBufferIterator.cpp" - "${LIBRARY_DIR}/Foundation/src/TextConverter.cpp" - "${LIBRARY_DIR}/Foundation/src/TextEncoding.cpp" - "${LIBRARY_DIR}/Foundation/src/TextIterator.cpp" - "${LIBRARY_DIR}/Foundation/src/Thread.cpp" - "${LIBRARY_DIR}/Foundation/src/ThreadLocal.cpp" - "${LIBRARY_DIR}/Foundation/src/ThreadPool.cpp" - "${LIBRARY_DIR}/Foundation/src/ThreadTarget.cpp" - "${LIBRARY_DIR}/Foundation/src/TimedNotificationQueue.cpp" - "${LIBRARY_DIR}/Foundation/src/Timer.cpp" - "${LIBRARY_DIR}/Foundation/src/Timespan.cpp" - "${LIBRARY_DIR}/Foundation/src/Timestamp.cpp" - "${LIBRARY_DIR}/Foundation/src/Timezone.cpp" - "${LIBRARY_DIR}/Foundation/src/Token.cpp" - "${LIBRARY_DIR}/Foundation/src/Unicode.cpp" - "${LIBRARY_DIR}/Foundation/src/UnicodeConverter.cpp" - "${LIBRARY_DIR}/Foundation/src/URI.cpp" - "${LIBRARY_DIR}/Foundation/src/URIStreamFactory.cpp" - "${LIBRARY_DIR}/Foundation/src/URIStreamOpener.cpp" - 
"${LIBRARY_DIR}/Foundation/src/UTF16Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/UTF32Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/UTF8Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/UTF8String.cpp" - "${LIBRARY_DIR}/Foundation/src/UUID.cpp" - "${LIBRARY_DIR}/Foundation/src/UUIDGenerator.cpp" - "${LIBRARY_DIR}/Foundation/src/Var.cpp" - "${LIBRARY_DIR}/Foundation/src/VarHolder.cpp" - "${LIBRARY_DIR}/Foundation/src/VarIterator.cpp" - "${LIBRARY_DIR}/Foundation/src/Void.cpp" - "${LIBRARY_DIR}/Foundation/src/Windows1250Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/Windows1251Encoding.cpp" - "${LIBRARY_DIR}/Foundation/src/Windows1252Encoding.cpp" - ) +set (SRCS + "${LIBRARY_DIR}/Foundation/src/AbstractObserver.cpp" + "${LIBRARY_DIR}/Foundation/src/ActiveDispatcher.cpp" + "${LIBRARY_DIR}/Foundation/src/ArchiveStrategy.cpp" + "${LIBRARY_DIR}/Foundation/src/Ascii.cpp" + "${LIBRARY_DIR}/Foundation/src/ASCIIEncoding.cpp" + "${LIBRARY_DIR}/Foundation/src/AsyncChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/AtomicCounter.cpp" + "${LIBRARY_DIR}/Foundation/src/Base32Decoder.cpp" + "${LIBRARY_DIR}/Foundation/src/Base32Encoder.cpp" + "${LIBRARY_DIR}/Foundation/src/Base64Decoder.cpp" + "${LIBRARY_DIR}/Foundation/src/Base64Encoder.cpp" + "${LIBRARY_DIR}/Foundation/src/BinaryReader.cpp" + "${LIBRARY_DIR}/Foundation/src/BinaryWriter.cpp" + "${LIBRARY_DIR}/Foundation/src/Bugcheck.cpp" + "${LIBRARY_DIR}/Foundation/src/ByteOrder.cpp" + "${LIBRARY_DIR}/Foundation/src/Channel.cpp" + "${LIBRARY_DIR}/Foundation/src/Checksum.cpp" + "${LIBRARY_DIR}/Foundation/src/Clock.cpp" + "${LIBRARY_DIR}/Foundation/src/CompressedLogFile.cpp" + "${LIBRARY_DIR}/Foundation/src/Condition.cpp" + "${LIBRARY_DIR}/Foundation/src/Configurable.cpp" + "${LIBRARY_DIR}/Foundation/src/ConsoleChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/CountingStream.cpp" + "${LIBRARY_DIR}/Foundation/src/DateTime.cpp" + "${LIBRARY_DIR}/Foundation/src/DateTimeFormat.cpp" + "${LIBRARY_DIR}/Foundation/src/DateTimeFormatter.cpp" + "${LIBRARY_DIR}/Foundation/src/DateTimeParser.cpp" + "${LIBRARY_DIR}/Foundation/src/Debugger.cpp" + "${LIBRARY_DIR}/Foundation/src/DeflatingStream.cpp" + "${LIBRARY_DIR}/Foundation/src/DigestEngine.cpp" + "${LIBRARY_DIR}/Foundation/src/DigestStream.cpp" + "${LIBRARY_DIR}/Foundation/src/DirectoryIterator.cpp" + "${LIBRARY_DIR}/Foundation/src/DirectoryIteratorStrategy.cpp" + "${LIBRARY_DIR}/Foundation/src/DirectoryWatcher.cpp" + "${LIBRARY_DIR}/Foundation/src/Environment.cpp" + "${LIBRARY_DIR}/Foundation/src/Error.cpp" + "${LIBRARY_DIR}/Foundation/src/ErrorHandler.cpp" + "${LIBRARY_DIR}/Foundation/src/Event.cpp" + "${LIBRARY_DIR}/Foundation/src/EventArgs.cpp" + "${LIBRARY_DIR}/Foundation/src/EventChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/Exception.cpp" + "${LIBRARY_DIR}/Foundation/src/FIFOBufferStream.cpp" + "${LIBRARY_DIR}/Foundation/src/File.cpp" + "${LIBRARY_DIR}/Foundation/src/FileChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/FileStream.cpp" + "${LIBRARY_DIR}/Foundation/src/FileStreamFactory.cpp" + "${LIBRARY_DIR}/Foundation/src/Format.cpp" + "${LIBRARY_DIR}/Foundation/src/Formatter.cpp" + "${LIBRARY_DIR}/Foundation/src/FormattingChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/FPEnvironment.cpp" + "${LIBRARY_DIR}/Foundation/src/Glob.cpp" + "${LIBRARY_DIR}/Foundation/src/Hash.cpp" + "${LIBRARY_DIR}/Foundation/src/HashStatistic.cpp" + "${LIBRARY_DIR}/Foundation/src/HexBinaryDecoder.cpp" + "${LIBRARY_DIR}/Foundation/src/HexBinaryEncoder.cpp" + "${LIBRARY_DIR}/Foundation/src/InflatingStream.cpp" + 
"${LIBRARY_DIR}/Foundation/src/JSONString.cpp" + "${LIBRARY_DIR}/Foundation/src/Latin1Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/Latin2Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/Latin9Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/LineEndingConverter.cpp" + "${LIBRARY_DIR}/Foundation/src/LocalDateTime.cpp" + "${LIBRARY_DIR}/Foundation/src/LogFile.cpp" + "${LIBRARY_DIR}/Foundation/src/Logger.cpp" + "${LIBRARY_DIR}/Foundation/src/LoggingFactory.cpp" + "${LIBRARY_DIR}/Foundation/src/LoggingRegistry.cpp" + "${LIBRARY_DIR}/Foundation/src/LogStream.cpp" + "${LIBRARY_DIR}/Foundation/src/Manifest.cpp" + "${LIBRARY_DIR}/Foundation/src/MD4Engine.cpp" + "${LIBRARY_DIR}/Foundation/src/MD5Engine.cpp" + "${LIBRARY_DIR}/Foundation/src/MemoryPool.cpp" + "${LIBRARY_DIR}/Foundation/src/MemoryStream.cpp" + "${LIBRARY_DIR}/Foundation/src/Message.cpp" + "${LIBRARY_DIR}/Foundation/src/Mutex.cpp" + "${LIBRARY_DIR}/Foundation/src/NamedEvent.cpp" + "${LIBRARY_DIR}/Foundation/src/NamedMutex.cpp" + "${LIBRARY_DIR}/Foundation/src/NestedDiagnosticContext.cpp" + "${LIBRARY_DIR}/Foundation/src/Notification.cpp" + "${LIBRARY_DIR}/Foundation/src/NotificationCenter.cpp" + "${LIBRARY_DIR}/Foundation/src/NotificationQueue.cpp" + "${LIBRARY_DIR}/Foundation/src/NullChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/NullStream.cpp" + "${LIBRARY_DIR}/Foundation/src/NumberFormatter.cpp" + "${LIBRARY_DIR}/Foundation/src/NumberParser.cpp" + "${LIBRARY_DIR}/Foundation/src/NumericString.cpp" + "${LIBRARY_DIR}/Foundation/src/Path.cpp" + "${LIBRARY_DIR}/Foundation/src/PatternFormatter.cpp" + "${LIBRARY_DIR}/Foundation/src/Pipe.cpp" + "${LIBRARY_DIR}/Foundation/src/PipeImpl.cpp" + "${LIBRARY_DIR}/Foundation/src/PipeStream.cpp" + "${LIBRARY_DIR}/Foundation/src/PriorityNotificationQueue.cpp" + "${LIBRARY_DIR}/Foundation/src/Process.cpp" + "${LIBRARY_DIR}/Foundation/src/PurgeStrategy.cpp" + "${LIBRARY_DIR}/Foundation/src/Random.cpp" + "${LIBRARY_DIR}/Foundation/src/RandomStream.cpp" + "${LIBRARY_DIR}/Foundation/src/RefCountedObject.cpp" + "${LIBRARY_DIR}/Foundation/src/RegularExpression.cpp" + "${LIBRARY_DIR}/Foundation/src/RotateStrategy.cpp" + "${LIBRARY_DIR}/Foundation/src/Runnable.cpp" + "${LIBRARY_DIR}/Foundation/src/RWLock.cpp" + "${LIBRARY_DIR}/Foundation/src/Semaphore.cpp" + "${LIBRARY_DIR}/Foundation/src/SHA1Engine.cpp" + "${LIBRARY_DIR}/Foundation/src/SharedLibrary.cpp" + "${LIBRARY_DIR}/Foundation/src/SharedMemory.cpp" + "${LIBRARY_DIR}/Foundation/src/SignalHandler.cpp" + "${LIBRARY_DIR}/Foundation/src/SimpleFileChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/SortedDirectoryIterator.cpp" + "${LIBRARY_DIR}/Foundation/src/SplitterChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/Stopwatch.cpp" + "${LIBRARY_DIR}/Foundation/src/StreamChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/StreamConverter.cpp" + "${LIBRARY_DIR}/Foundation/src/StreamCopier.cpp" + "${LIBRARY_DIR}/Foundation/src/StreamTokenizer.cpp" + "${LIBRARY_DIR}/Foundation/src/String.cpp" + "${LIBRARY_DIR}/Foundation/src/StringTokenizer.cpp" + "${LIBRARY_DIR}/Foundation/src/SynchronizedObject.cpp" + "${LIBRARY_DIR}/Foundation/src/SyslogChannel.cpp" + "${LIBRARY_DIR}/Foundation/src/Task.cpp" + "${LIBRARY_DIR}/Foundation/src/TaskManager.cpp" + "${LIBRARY_DIR}/Foundation/src/TaskNotification.cpp" + "${LIBRARY_DIR}/Foundation/src/TeeStream.cpp" + "${LIBRARY_DIR}/Foundation/src/TemporaryFile.cpp" + "${LIBRARY_DIR}/Foundation/src/TextBufferIterator.cpp" + "${LIBRARY_DIR}/Foundation/src/TextConverter.cpp" + "${LIBRARY_DIR}/Foundation/src/TextEncoding.cpp" + 
"${LIBRARY_DIR}/Foundation/src/TextIterator.cpp" + "${LIBRARY_DIR}/Foundation/src/Thread.cpp" + "${LIBRARY_DIR}/Foundation/src/ThreadLocal.cpp" + "${LIBRARY_DIR}/Foundation/src/ThreadPool.cpp" + "${LIBRARY_DIR}/Foundation/src/ThreadTarget.cpp" + "${LIBRARY_DIR}/Foundation/src/TimedNotificationQueue.cpp" + "${LIBRARY_DIR}/Foundation/src/Timer.cpp" + "${LIBRARY_DIR}/Foundation/src/Timespan.cpp" + "${LIBRARY_DIR}/Foundation/src/Timestamp.cpp" + "${LIBRARY_DIR}/Foundation/src/Timezone.cpp" + "${LIBRARY_DIR}/Foundation/src/Token.cpp" + "${LIBRARY_DIR}/Foundation/src/Unicode.cpp" + "${LIBRARY_DIR}/Foundation/src/UnicodeConverter.cpp" + "${LIBRARY_DIR}/Foundation/src/URI.cpp" + "${LIBRARY_DIR}/Foundation/src/URIStreamFactory.cpp" + "${LIBRARY_DIR}/Foundation/src/URIStreamOpener.cpp" + "${LIBRARY_DIR}/Foundation/src/UTF16Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/UTF32Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/UTF8Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/UTF8String.cpp" + "${LIBRARY_DIR}/Foundation/src/UUID.cpp" + "${LIBRARY_DIR}/Foundation/src/UUIDGenerator.cpp" + "${LIBRARY_DIR}/Foundation/src/Var.cpp" + "${LIBRARY_DIR}/Foundation/src/VarHolder.cpp" + "${LIBRARY_DIR}/Foundation/src/VarIterator.cpp" + "${LIBRARY_DIR}/Foundation/src/Void.cpp" + "${LIBRARY_DIR}/Foundation/src/Windows1250Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/Windows1251Encoding.cpp" + "${LIBRARY_DIR}/Foundation/src/Windows1252Encoding.cpp" +) - add_library (_poco_foundation ${SRCS}) - add_library (Poco::Foundation ALIAS _poco_foundation) +add_library (_poco_foundation ${SRCS}) +add_library (Poco::Foundation ALIAS _poco_foundation) - if (COMPILER_GCC) - target_compile_options (_poco_foundation - PRIVATE - -Wno-suggest-override - ) - elseif (COMPILER_CLANG) - target_compile_options (_poco_foundation - PRIVATE - -Wno-atomic-implicit-seq-cst - -Wno-deprecated - -Wno-extra-semi-stmt - -Wno-zero-as-null-pointer-constant - -Wno-implicit-int-float-conversion - -Wno-thread-safety-analysis - -Wno-thread-safety-negative - ) - endif () +if (COMPILER_GCC) target_compile_options (_poco_foundation PRIVATE - -Wno-sign-compare - -Wno-unused-parameter + -Wno-suggest-override ) - target_compile_definitions (_poco_foundation +elseif (COMPILER_CLANG) + target_compile_options (_poco_foundation PRIVATE - POCO_UNBUNDLED - POCO_UNBUNDLED_ZLIB - PUBLIC - POCO_ENABLE_CPP11 - POCO_OS_FAMILY_UNIX + -Wno-atomic-implicit-seq-cst + -Wno-deprecated + -Wno-extra-semi-stmt + -Wno-zero-as-null-pointer-constant + -Wno-implicit-int-float-conversion + -Wno-thread-safety-analysis + -Wno-thread-safety-negative ) - target_include_directories (_poco_foundation SYSTEM PUBLIC "${LIBRARY_DIR}/Foundation/include") - target_link_libraries (_poco_foundation PRIVATE Poco::Foundation::PCRE ${ZLIB_LIBRARIES} lz4) -else () - add_library (Poco::Foundation UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_FOUNDATION PocoFoundation) - find_path (INCLUDE_POCO_FOUNDATION Poco/Foundation.h) - set_target_properties (Poco::Foundation PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_FOUNDATION}) - set_target_properties (Poco::Foundation PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_FOUNDATION}) - - message (STATUS "Using Poco::Foundation: ${LIBRARY_POCO_FOUNDATION} ${INCLUDE_POCO_FOUNDATION}") endif () +target_compile_options (_poco_foundation + PRIVATE + -Wno-sign-compare + -Wno-unused-parameter +) +target_compile_definitions (_poco_foundation + PRIVATE + POCO_UNBUNDLED + POCO_UNBUNDLED_ZLIB + PUBLIC + POCO_ENABLE_CPP11 + POCO_OS_FAMILY_UNIX +) 
+target_include_directories (_poco_foundation SYSTEM PUBLIC "${LIBRARY_DIR}/Foundation/include") +target_link_libraries (_poco_foundation + PRIVATE + Poco::Foundation::PCRE + ch_contrib::zlib + ch_contrib::lz4) if(OS_DARWIN AND ARCH_AARCH64) target_compile_definitions (_poco_foundation diff --git a/contrib/poco-cmake/JSON/CMakeLists.txt b/contrib/poco-cmake/JSON/CMakeLists.txt index 7033b800d5d..e138dd046a8 100644 --- a/contrib/poco-cmake/JSON/CMakeLists.txt +++ b/contrib/poco-cmake/JSON/CMakeLists.txt @@ -1,42 +1,31 @@ -if (USE_INTERNAL_POCO_LIBRARY) - # Poco::JSON (pdjson) +# Poco::JSON (pdjson) - set (SRCS_PDJSON - "${LIBRARY_DIR}/JSON/src/pdjson.c" - ) +set (SRCS_PDJSON + "${LIBRARY_DIR}/JSON/src/pdjson.c" +) - add_library (_poco_json_pdjson ${SRCS_PDJSON}) - add_library (Poco::JSON::Pdjson ALIAS _poco_json_pdjson) +add_library (_poco_json_pdjson ${SRCS_PDJSON}) +add_library (Poco::JSON::Pdjson ALIAS _poco_json_pdjson) - # Poco::JSON +# Poco::JSON - set (SRCS - "${LIBRARY_DIR}/JSON/src/Array.cpp" - "${LIBRARY_DIR}/JSON/src/Handler.cpp" - "${LIBRARY_DIR}/JSON/src/JSONException.cpp" - "${LIBRARY_DIR}/JSON/src/Object.cpp" - "${LIBRARY_DIR}/JSON/src/ParseHandler.cpp" - "${LIBRARY_DIR}/JSON/src/Parser.cpp" - "${LIBRARY_DIR}/JSON/src/ParserImpl.cpp" - "${LIBRARY_DIR}/JSON/src/PrintHandler.cpp" - "${LIBRARY_DIR}/JSON/src/Query.cpp" - "${LIBRARY_DIR}/JSON/src/Stringifier.cpp" - "${LIBRARY_DIR}/JSON/src/Template.cpp" - "${LIBRARY_DIR}/JSON/src/TemplateCache.cpp" - ) +set (SRCS + "${LIBRARY_DIR}/JSON/src/Array.cpp" + "${LIBRARY_DIR}/JSON/src/Handler.cpp" + "${LIBRARY_DIR}/JSON/src/JSONException.cpp" + "${LIBRARY_DIR}/JSON/src/Object.cpp" + "${LIBRARY_DIR}/JSON/src/ParseHandler.cpp" + "${LIBRARY_DIR}/JSON/src/Parser.cpp" + "${LIBRARY_DIR}/JSON/src/ParserImpl.cpp" + "${LIBRARY_DIR}/JSON/src/PrintHandler.cpp" + "${LIBRARY_DIR}/JSON/src/Query.cpp" + "${LIBRARY_DIR}/JSON/src/Stringifier.cpp" + "${LIBRARY_DIR}/JSON/src/Template.cpp" + "${LIBRARY_DIR}/JSON/src/TemplateCache.cpp" +) - add_library (_poco_json ${SRCS}) - add_library (Poco::JSON ALIAS _poco_json) +add_library (_poco_json ${SRCS}) +add_library (Poco::JSON ALIAS _poco_json) - target_include_directories (_poco_json SYSTEM PUBLIC "${LIBRARY_DIR}/JSON/include") - target_link_libraries (_poco_json PUBLIC Poco::Foundation Poco::JSON::Pdjson) -else () - add_library (Poco::JSON UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_JSON PocoJSON) - find_path (INCLUDE_POCO_JSON Poco/JSON/JSON.h) - set_target_properties (Poco::JSON PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_JSON}) - set_target_properties (Poco::JSON PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_JSON}) - - message (STATUS "Using Poco::JSON: ${LIBRARY_POCO_JSON} ${INCLUDE_POCO_JSON}") -endif () +target_include_directories (_poco_json SYSTEM PUBLIC "${LIBRARY_DIR}/JSON/include") +target_link_libraries (_poco_json PUBLIC Poco::Foundation Poco::JSON::Pdjson) diff --git a/contrib/poco-cmake/MongoDB/CMakeLists.txt b/contrib/poco-cmake/MongoDB/CMakeLists.txt index e3dce7ac5cd..fec256b4dcd 100644 --- a/contrib/poco-cmake/MongoDB/CMakeLists.txt +++ b/contrib/poco-cmake/MongoDB/CMakeLists.txt @@ -1,40 +1,29 @@ -if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/MongoDB/src/Array.cpp" - "${LIBRARY_DIR}/MongoDB/src/Binary.cpp" - "${LIBRARY_DIR}/MongoDB/src/Connection.cpp" - "${LIBRARY_DIR}/MongoDB/src/Cursor.cpp" - "${LIBRARY_DIR}/MongoDB/src/Database.cpp" - "${LIBRARY_DIR}/MongoDB/src/DeleteRequest.cpp" - "${LIBRARY_DIR}/MongoDB/src/Document.cpp" - 
"${LIBRARY_DIR}/MongoDB/src/Element.cpp" - "${LIBRARY_DIR}/MongoDB/src/GetMoreRequest.cpp" - "${LIBRARY_DIR}/MongoDB/src/InsertRequest.cpp" - "${LIBRARY_DIR}/MongoDB/src/JavaScriptCode.cpp" - "${LIBRARY_DIR}/MongoDB/src/KillCursorsRequest.cpp" - "${LIBRARY_DIR}/MongoDB/src/Message.cpp" - "${LIBRARY_DIR}/MongoDB/src/MessageHeader.cpp" - "${LIBRARY_DIR}/MongoDB/src/ObjectId.cpp" - "${LIBRARY_DIR}/MongoDB/src/QueryRequest.cpp" - "${LIBRARY_DIR}/MongoDB/src/RegularExpression.cpp" - "${LIBRARY_DIR}/MongoDB/src/ReplicaSet.cpp" - "${LIBRARY_DIR}/MongoDB/src/RequestMessage.cpp" - "${LIBRARY_DIR}/MongoDB/src/ResponseMessage.cpp" - "${LIBRARY_DIR}/MongoDB/src/UpdateRequest.cpp" - ) +set (SRCS + "${LIBRARY_DIR}/MongoDB/src/Array.cpp" + "${LIBRARY_DIR}/MongoDB/src/Binary.cpp" + "${LIBRARY_DIR}/MongoDB/src/Connection.cpp" + "${LIBRARY_DIR}/MongoDB/src/Cursor.cpp" + "${LIBRARY_DIR}/MongoDB/src/Database.cpp" + "${LIBRARY_DIR}/MongoDB/src/DeleteRequest.cpp" + "${LIBRARY_DIR}/MongoDB/src/Document.cpp" + "${LIBRARY_DIR}/MongoDB/src/Element.cpp" + "${LIBRARY_DIR}/MongoDB/src/GetMoreRequest.cpp" + "${LIBRARY_DIR}/MongoDB/src/InsertRequest.cpp" + "${LIBRARY_DIR}/MongoDB/src/JavaScriptCode.cpp" + "${LIBRARY_DIR}/MongoDB/src/KillCursorsRequest.cpp" + "${LIBRARY_DIR}/MongoDB/src/Message.cpp" + "${LIBRARY_DIR}/MongoDB/src/MessageHeader.cpp" + "${LIBRARY_DIR}/MongoDB/src/ObjectId.cpp" + "${LIBRARY_DIR}/MongoDB/src/QueryRequest.cpp" + "${LIBRARY_DIR}/MongoDB/src/RegularExpression.cpp" + "${LIBRARY_DIR}/MongoDB/src/ReplicaSet.cpp" + "${LIBRARY_DIR}/MongoDB/src/RequestMessage.cpp" + "${LIBRARY_DIR}/MongoDB/src/ResponseMessage.cpp" + "${LIBRARY_DIR}/MongoDB/src/UpdateRequest.cpp" +) - add_library (_poco_mongodb ${SRCS}) - add_library (Poco::MongoDB ALIAS _poco_mongodb) +add_library (_poco_mongodb ${SRCS}) +add_library (Poco::MongoDB ALIAS _poco_mongodb) - target_include_directories (_poco_mongodb SYSTEM PUBLIC "${LIBRARY_DIR}/MongoDB/include") - target_link_libraries (_poco_mongodb PUBLIC Poco::Net) -else () - add_library (Poco::MongoDB UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_MONGODB PocoMongoDB) - find_path (INCLUDE_POCO_MONGODB Poco/MongoDB/MongoDB.h) - set_target_properties (Poco::MongoDB PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_MONGODB}) - set_target_properties (Poco::MongoDB PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_MONGODB}) - - message (STATUS "Using Poco::MongoDB: ${LIBRARY_POCO_MONGODB} ${INCLUDE_POCO_MONGODB}") -endif () +target_include_directories (_poco_mongodb SYSTEM PUBLIC "${LIBRARY_DIR}/MongoDB/include") +target_link_libraries (_poco_mongodb PUBLIC Poco::Net) diff --git a/contrib/poco-cmake/Net/CMakeLists.txt b/contrib/poco-cmake/Net/CMakeLists.txt index 45989af8d45..30ff799ccfc 100644 --- a/contrib/poco-cmake/Net/CMakeLists.txt +++ b/contrib/poco-cmake/Net/CMakeLists.txt @@ -1,139 +1,128 @@ -if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/Net/src/AbstractHTTPRequestHandler.cpp" - "${LIBRARY_DIR}/Net/src/DatagramSocket.cpp" - "${LIBRARY_DIR}/Net/src/DatagramSocketImpl.cpp" - "${LIBRARY_DIR}/Net/src/DialogSocket.cpp" - "${LIBRARY_DIR}/Net/src/DNS.cpp" - "${LIBRARY_DIR}/Net/src/FilePartSource.cpp" - "${LIBRARY_DIR}/Net/src/FTPClientSession.cpp" - "${LIBRARY_DIR}/Net/src/FTPStreamFactory.cpp" - "${LIBRARY_DIR}/Net/src/HostEntry.cpp" - "${LIBRARY_DIR}/Net/src/HTMLForm.cpp" - "${LIBRARY_DIR}/Net/src/HTTPAuthenticationParams.cpp" - "${LIBRARY_DIR}/Net/src/HTTPBasicCredentials.cpp" - "${LIBRARY_DIR}/Net/src/HTTPBufferAllocator.cpp" - 
"${LIBRARY_DIR}/Net/src/HTTPChunkedStream.cpp" - "${LIBRARY_DIR}/Net/src/HTTPClientSession.cpp" - "${LIBRARY_DIR}/Net/src/HTTPCookie.cpp" - "${LIBRARY_DIR}/Net/src/HTTPCredentials.cpp" - "${LIBRARY_DIR}/Net/src/HTTPDigestCredentials.cpp" - "${LIBRARY_DIR}/Net/src/HTTPFixedLengthStream.cpp" - "${LIBRARY_DIR}/Net/src/HTTPHeaderStream.cpp" - "${LIBRARY_DIR}/Net/src/HTTPIOStream.cpp" - "${LIBRARY_DIR}/Net/src/HTTPMessage.cpp" - "${LIBRARY_DIR}/Net/src/HTTPRequest.cpp" - "${LIBRARY_DIR}/Net/src/HTTPRequestHandler.cpp" - "${LIBRARY_DIR}/Net/src/HTTPRequestHandlerFactory.cpp" - "${LIBRARY_DIR}/Net/src/HTTPResponse.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServer.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerConnection.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerConnectionFactory.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerParams.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerRequest.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerRequestImpl.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerResponse.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerResponseImpl.cpp" - "${LIBRARY_DIR}/Net/src/HTTPServerSession.cpp" - "${LIBRARY_DIR}/Net/src/HTTPSession.cpp" - "${LIBRARY_DIR}/Net/src/HTTPSessionFactory.cpp" - "${LIBRARY_DIR}/Net/src/HTTPSessionInstantiator.cpp" - "${LIBRARY_DIR}/Net/src/HTTPStream.cpp" - "${LIBRARY_DIR}/Net/src/HTTPStreamFactory.cpp" - "${LIBRARY_DIR}/Net/src/ICMPClient.cpp" - "${LIBRARY_DIR}/Net/src/ICMPEventArgs.cpp" - "${LIBRARY_DIR}/Net/src/ICMPPacket.cpp" - "${LIBRARY_DIR}/Net/src/ICMPPacketImpl.cpp" - "${LIBRARY_DIR}/Net/src/ICMPSocket.cpp" - "${LIBRARY_DIR}/Net/src/ICMPSocketImpl.cpp" - "${LIBRARY_DIR}/Net/src/ICMPv4PacketImpl.cpp" - "${LIBRARY_DIR}/Net/src/IPAddress.cpp" - "${LIBRARY_DIR}/Net/src/IPAddressImpl.cpp" - "${LIBRARY_DIR}/Net/src/MailMessage.cpp" - "${LIBRARY_DIR}/Net/src/MailRecipient.cpp" - "${LIBRARY_DIR}/Net/src/MailStream.cpp" - "${LIBRARY_DIR}/Net/src/MediaType.cpp" - "${LIBRARY_DIR}/Net/src/MessageHeader.cpp" - "${LIBRARY_DIR}/Net/src/MulticastSocket.cpp" - "${LIBRARY_DIR}/Net/src/MultipartReader.cpp" - "${LIBRARY_DIR}/Net/src/MultipartWriter.cpp" - "${LIBRARY_DIR}/Net/src/NameValueCollection.cpp" - "${LIBRARY_DIR}/Net/src/Net.cpp" - "${LIBRARY_DIR}/Net/src/NetException.cpp" - "${LIBRARY_DIR}/Net/src/NetworkInterface.cpp" - "${LIBRARY_DIR}/Net/src/NTPClient.cpp" - "${LIBRARY_DIR}/Net/src/NTPEventArgs.cpp" - "${LIBRARY_DIR}/Net/src/NTPPacket.cpp" - "${LIBRARY_DIR}/Net/src/NullPartHandler.cpp" - "${LIBRARY_DIR}/Net/src/OAuth10Credentials.cpp" - "${LIBRARY_DIR}/Net/src/OAuth20Credentials.cpp" - "${LIBRARY_DIR}/Net/src/PartHandler.cpp" - "${LIBRARY_DIR}/Net/src/PartSource.cpp" - "${LIBRARY_DIR}/Net/src/PartStore.cpp" - "${LIBRARY_DIR}/Net/src/PollSet.cpp" - "${LIBRARY_DIR}/Net/src/POP3ClientSession.cpp" - "${LIBRARY_DIR}/Net/src/QuotedPrintableDecoder.cpp" - "${LIBRARY_DIR}/Net/src/QuotedPrintableEncoder.cpp" - "${LIBRARY_DIR}/Net/src/RawSocket.cpp" - "${LIBRARY_DIR}/Net/src/RawSocketImpl.cpp" - "${LIBRARY_DIR}/Net/src/RemoteSyslogChannel.cpp" - "${LIBRARY_DIR}/Net/src/RemoteSyslogListener.cpp" - "${LIBRARY_DIR}/Net/src/ServerSocket.cpp" - "${LIBRARY_DIR}/Net/src/ServerSocketImpl.cpp" - "${LIBRARY_DIR}/Net/src/SMTPChannel.cpp" - "${LIBRARY_DIR}/Net/src/SMTPClientSession.cpp" - "${LIBRARY_DIR}/Net/src/Socket.cpp" - "${LIBRARY_DIR}/Net/src/SocketAddress.cpp" - "${LIBRARY_DIR}/Net/src/SocketAddressImpl.cpp" - "${LIBRARY_DIR}/Net/src/SocketImpl.cpp" - "${LIBRARY_DIR}/Net/src/SocketNotification.cpp" - "${LIBRARY_DIR}/Net/src/SocketNotifier.cpp" - "${LIBRARY_DIR}/Net/src/SocketReactor.cpp" - 
"${LIBRARY_DIR}/Net/src/SocketStream.cpp" - "${LIBRARY_DIR}/Net/src/StreamSocket.cpp" - "${LIBRARY_DIR}/Net/src/StreamSocketImpl.cpp" - "${LIBRARY_DIR}/Net/src/StringPartSource.cpp" - "${LIBRARY_DIR}/Net/src/TCPServer.cpp" - "${LIBRARY_DIR}/Net/src/TCPServerConnection.cpp" - "${LIBRARY_DIR}/Net/src/TCPServerConnectionFactory.cpp" - "${LIBRARY_DIR}/Net/src/TCPServerDispatcher.cpp" - "${LIBRARY_DIR}/Net/src/TCPServerParams.cpp" - "${LIBRARY_DIR}/Net/src/WebSocket.cpp" - "${LIBRARY_DIR}/Net/src/WebSocketImpl.cpp" - ) +set (SRCS + "${LIBRARY_DIR}/Net/src/AbstractHTTPRequestHandler.cpp" + "${LIBRARY_DIR}/Net/src/DatagramSocket.cpp" + "${LIBRARY_DIR}/Net/src/DatagramSocketImpl.cpp" + "${LIBRARY_DIR}/Net/src/DialogSocket.cpp" + "${LIBRARY_DIR}/Net/src/DNS.cpp" + "${LIBRARY_DIR}/Net/src/FilePartSource.cpp" + "${LIBRARY_DIR}/Net/src/FTPClientSession.cpp" + "${LIBRARY_DIR}/Net/src/FTPStreamFactory.cpp" + "${LIBRARY_DIR}/Net/src/HostEntry.cpp" + "${LIBRARY_DIR}/Net/src/HTMLForm.cpp" + "${LIBRARY_DIR}/Net/src/HTTPAuthenticationParams.cpp" + "${LIBRARY_DIR}/Net/src/HTTPBasicCredentials.cpp" + "${LIBRARY_DIR}/Net/src/HTTPBufferAllocator.cpp" + "${LIBRARY_DIR}/Net/src/HTTPChunkedStream.cpp" + "${LIBRARY_DIR}/Net/src/HTTPClientSession.cpp" + "${LIBRARY_DIR}/Net/src/HTTPCookie.cpp" + "${LIBRARY_DIR}/Net/src/HTTPCredentials.cpp" + "${LIBRARY_DIR}/Net/src/HTTPDigestCredentials.cpp" + "${LIBRARY_DIR}/Net/src/HTTPFixedLengthStream.cpp" + "${LIBRARY_DIR}/Net/src/HTTPHeaderStream.cpp" + "${LIBRARY_DIR}/Net/src/HTTPIOStream.cpp" + "${LIBRARY_DIR}/Net/src/HTTPMessage.cpp" + "${LIBRARY_DIR}/Net/src/HTTPRequest.cpp" + "${LIBRARY_DIR}/Net/src/HTTPRequestHandler.cpp" + "${LIBRARY_DIR}/Net/src/HTTPRequestHandlerFactory.cpp" + "${LIBRARY_DIR}/Net/src/HTTPResponse.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServer.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerConnection.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerConnectionFactory.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerParams.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerRequest.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerRequestImpl.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerResponse.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerResponseImpl.cpp" + "${LIBRARY_DIR}/Net/src/HTTPServerSession.cpp" + "${LIBRARY_DIR}/Net/src/HTTPSession.cpp" + "${LIBRARY_DIR}/Net/src/HTTPSessionFactory.cpp" + "${LIBRARY_DIR}/Net/src/HTTPSessionInstantiator.cpp" + "${LIBRARY_DIR}/Net/src/HTTPStream.cpp" + "${LIBRARY_DIR}/Net/src/HTTPStreamFactory.cpp" + "${LIBRARY_DIR}/Net/src/ICMPClient.cpp" + "${LIBRARY_DIR}/Net/src/ICMPEventArgs.cpp" + "${LIBRARY_DIR}/Net/src/ICMPPacket.cpp" + "${LIBRARY_DIR}/Net/src/ICMPPacketImpl.cpp" + "${LIBRARY_DIR}/Net/src/ICMPSocket.cpp" + "${LIBRARY_DIR}/Net/src/ICMPSocketImpl.cpp" + "${LIBRARY_DIR}/Net/src/ICMPv4PacketImpl.cpp" + "${LIBRARY_DIR}/Net/src/IPAddress.cpp" + "${LIBRARY_DIR}/Net/src/IPAddressImpl.cpp" + "${LIBRARY_DIR}/Net/src/MailMessage.cpp" + "${LIBRARY_DIR}/Net/src/MailRecipient.cpp" + "${LIBRARY_DIR}/Net/src/MailStream.cpp" + "${LIBRARY_DIR}/Net/src/MediaType.cpp" + "${LIBRARY_DIR}/Net/src/MessageHeader.cpp" + "${LIBRARY_DIR}/Net/src/MulticastSocket.cpp" + "${LIBRARY_DIR}/Net/src/MultipartReader.cpp" + "${LIBRARY_DIR}/Net/src/MultipartWriter.cpp" + "${LIBRARY_DIR}/Net/src/NameValueCollection.cpp" + "${LIBRARY_DIR}/Net/src/Net.cpp" + "${LIBRARY_DIR}/Net/src/NetException.cpp" + "${LIBRARY_DIR}/Net/src/NetworkInterface.cpp" + "${LIBRARY_DIR}/Net/src/NTPClient.cpp" + "${LIBRARY_DIR}/Net/src/NTPEventArgs.cpp" + "${LIBRARY_DIR}/Net/src/NTPPacket.cpp" + 
"${LIBRARY_DIR}/Net/src/NullPartHandler.cpp" + "${LIBRARY_DIR}/Net/src/OAuth10Credentials.cpp" + "${LIBRARY_DIR}/Net/src/OAuth20Credentials.cpp" + "${LIBRARY_DIR}/Net/src/PartHandler.cpp" + "${LIBRARY_DIR}/Net/src/PartSource.cpp" + "${LIBRARY_DIR}/Net/src/PartStore.cpp" + "${LIBRARY_DIR}/Net/src/PollSet.cpp" + "${LIBRARY_DIR}/Net/src/POP3ClientSession.cpp" + "${LIBRARY_DIR}/Net/src/QuotedPrintableDecoder.cpp" + "${LIBRARY_DIR}/Net/src/QuotedPrintableEncoder.cpp" + "${LIBRARY_DIR}/Net/src/RawSocket.cpp" + "${LIBRARY_DIR}/Net/src/RawSocketImpl.cpp" + "${LIBRARY_DIR}/Net/src/RemoteSyslogChannel.cpp" + "${LIBRARY_DIR}/Net/src/RemoteSyslogListener.cpp" + "${LIBRARY_DIR}/Net/src/ServerSocket.cpp" + "${LIBRARY_DIR}/Net/src/ServerSocketImpl.cpp" + "${LIBRARY_DIR}/Net/src/SMTPChannel.cpp" + "${LIBRARY_DIR}/Net/src/SMTPClientSession.cpp" + "${LIBRARY_DIR}/Net/src/Socket.cpp" + "${LIBRARY_DIR}/Net/src/SocketAddress.cpp" + "${LIBRARY_DIR}/Net/src/SocketAddressImpl.cpp" + "${LIBRARY_DIR}/Net/src/SocketImpl.cpp" + "${LIBRARY_DIR}/Net/src/SocketNotification.cpp" + "${LIBRARY_DIR}/Net/src/SocketNotifier.cpp" + "${LIBRARY_DIR}/Net/src/SocketReactor.cpp" + "${LIBRARY_DIR}/Net/src/SocketStream.cpp" + "${LIBRARY_DIR}/Net/src/StreamSocket.cpp" + "${LIBRARY_DIR}/Net/src/StreamSocketImpl.cpp" + "${LIBRARY_DIR}/Net/src/StringPartSource.cpp" + "${LIBRARY_DIR}/Net/src/TCPServer.cpp" + "${LIBRARY_DIR}/Net/src/TCPServerConnection.cpp" + "${LIBRARY_DIR}/Net/src/TCPServerConnectionFactory.cpp" + "${LIBRARY_DIR}/Net/src/TCPServerDispatcher.cpp" + "${LIBRARY_DIR}/Net/src/TCPServerParams.cpp" + "${LIBRARY_DIR}/Net/src/WebSocket.cpp" + "${LIBRARY_DIR}/Net/src/WebSocketImpl.cpp" +) - add_library (_poco_net ${SRCS}) - add_library (Poco::Net ALIAS _poco_net) +add_library (_poco_net ${SRCS}) +add_library (Poco::Net ALIAS _poco_net) - if (OS_LINUX) - target_compile_definitions (_poco_net PUBLIC POCO_HAVE_FD_EPOLL) - elseif (OS_DARWIN OR OS_FREEBSD) - target_compile_definitions (_poco_net PUBLIC POCO_HAVE_FD_POLL) - endif () +if (OS_LINUX) + target_compile_definitions (_poco_net PUBLIC POCO_HAVE_FD_EPOLL) +elseif (OS_DARWIN OR OS_FREEBSD) + target_compile_definitions (_poco_net PUBLIC POCO_HAVE_FD_POLL) +endif () - if (COMPILER_CLANG) - # clang-specific warnings - target_compile_options (_poco_net - PRIVATE - -Wno-atomic-implicit-seq-cst - -Wno-extra-semi-stmt - -Wno-extra-semi - ) - endif () +if (COMPILER_CLANG) + # clang-specific warnings target_compile_options (_poco_net PRIVATE - -Wno-deprecated + -Wno-atomic-implicit-seq-cst + -Wno-extra-semi-stmt -Wno-extra-semi ) - target_include_directories (_poco_net SYSTEM PUBLIC "${LIBRARY_DIR}/Net/include") - target_link_libraries (_poco_net PUBLIC Poco::Foundation) -else () - add_library (Poco::Net UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_NET PocoNet) - find_path (INCLUDE_POCO_NET Poco/Net/Net.h) - set_target_properties (Poco::Net PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_NET}) - set_target_properties (Poco::Net PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_NET}) - - message (STATUS "Using Poco::Net: ${LIBRARY_POCO_NET} ${INCLUDE_POCO_NET}") endif () +target_compile_options (_poco_net + PRIVATE + -Wno-deprecated + -Wno-extra-semi +) +target_include_directories (_poco_net SYSTEM PUBLIC "${LIBRARY_DIR}/Net/include") +target_link_libraries (_poco_net PUBLIC Poco::Foundation) diff --git a/contrib/poco-cmake/Net/SSL/CMakeLists.txt b/contrib/poco-cmake/Net/SSL/CMakeLists.txt index 4b3adacfb8f..de2bb624a8b 100644 --- a/contrib/poco-cmake/Net/SSL/CMakeLists.txt 
+++ b/contrib/poco-cmake/Net/SSL/CMakeLists.txt @@ -1,50 +1,39 @@ if (ENABLE_SSL) - if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/AcceptCertificateHandler.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/CertificateHandlerFactory.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/CertificateHandlerFactoryMgr.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/ConsoleCertificateHandler.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/Context.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/HTTPSClientSession.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/HTTPSSessionInstantiator.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/HTTPSStreamFactory.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/InvalidCertificateHandler.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/KeyConsoleHandler.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/KeyFileHandler.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/PrivateKeyFactory.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/PrivateKeyFactoryMgr.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/PrivateKeyPassphraseHandler.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/RejectCertificateHandler.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureServerSocket.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureServerSocketImpl.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureSMTPClientSession.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureSocketImpl.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureStreamSocket.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureStreamSocketImpl.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/Session.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SSLException.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SSLManager.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/Utility.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/VerificationErrorArgs.cpp" - "${LIBRARY_DIR}/NetSSL_OpenSSL/src/X509Certificate.cpp" - ) + set (SRCS + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/AcceptCertificateHandler.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/CertificateHandlerFactory.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/CertificateHandlerFactoryMgr.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/ConsoleCertificateHandler.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/Context.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/HTTPSClientSession.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/HTTPSSessionInstantiator.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/HTTPSStreamFactory.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/InvalidCertificateHandler.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/KeyConsoleHandler.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/KeyFileHandler.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/PrivateKeyFactory.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/PrivateKeyFactoryMgr.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/PrivateKeyPassphraseHandler.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/RejectCertificateHandler.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureServerSocket.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureServerSocketImpl.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureSMTPClientSession.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureSocketImpl.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureStreamSocket.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SecureStreamSocketImpl.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/Session.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SSLException.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/SSLManager.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/Utility.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/VerificationErrorArgs.cpp" + "${LIBRARY_DIR}/NetSSL_OpenSSL/src/X509Certificate.cpp" + ) - add_library (_poco_net_ssl ${SRCS}) - add_library (Poco::Net::SSL ALIAS 
_poco_net_ssl) + add_library (_poco_net_ssl ${SRCS}) + add_library (Poco::Net::SSL ALIAS _poco_net_ssl) - target_include_directories (_poco_net_ssl SYSTEM PUBLIC "${LIBRARY_DIR}/NetSSL_OpenSSL/include") - target_link_libraries (_poco_net_ssl PUBLIC Poco::Crypto Poco::Net Poco::Util) - else () - add_library (Poco::Net::SSL UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_NET_SSL PocoNetSSL) - find_path (INCLUDE_POCO_NET_SSL Poco/Net/NetSSL.h) - set_target_properties (Poco::Net::SSL PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_NET_SSL}) - set_target_properties (Poco::Net::SSL PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_NET_SSL}) - - message (STATUS "Using Poco::Net::SSL: ${LIBRARY_POCO_NET_SSL} ${INCLUDE_POCO_NET_SSL}") - endif () + target_include_directories (_poco_net_ssl SYSTEM PUBLIC "${LIBRARY_DIR}/NetSSL_OpenSSL/include") + target_link_libraries (_poco_net_ssl PUBLIC Poco::Crypto Poco::Net Poco::Util) else () add_library (_poco_net_ssl INTERFACE) add_library (Poco::Net::SSL ALIAS _poco_net_ssl) diff --git a/contrib/poco-cmake/Redis/CMakeLists.txt b/contrib/poco-cmake/Redis/CMakeLists.txt index b5892addd85..98e86a8592b 100644 --- a/contrib/poco-cmake/Redis/CMakeLists.txt +++ b/contrib/poco-cmake/Redis/CMakeLists.txt @@ -1,34 +1,21 @@ -if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/Redis/src/Array.cpp" - "${LIBRARY_DIR}/Redis/src/AsyncReader.cpp" - "${LIBRARY_DIR}/Redis/src/Client.cpp" - "${LIBRARY_DIR}/Redis/src/Command.cpp" - "${LIBRARY_DIR}/Redis/src/Error.cpp" - "${LIBRARY_DIR}/Redis/src/Exception.cpp" - "${LIBRARY_DIR}/Redis/src/RedisEventArgs.cpp" - "${LIBRARY_DIR}/Redis/src/RedisStream.cpp" - "${LIBRARY_DIR}/Redis/src/Type.cpp" - ) +set (SRCS + "${LIBRARY_DIR}/Redis/src/Array.cpp" + "${LIBRARY_DIR}/Redis/src/AsyncReader.cpp" + "${LIBRARY_DIR}/Redis/src/Client.cpp" + "${LIBRARY_DIR}/Redis/src/Command.cpp" + "${LIBRARY_DIR}/Redis/src/Error.cpp" + "${LIBRARY_DIR}/Redis/src/Exception.cpp" + "${LIBRARY_DIR}/Redis/src/RedisEventArgs.cpp" + "${LIBRARY_DIR}/Redis/src/RedisStream.cpp" + "${LIBRARY_DIR}/Redis/src/Type.cpp" +) - add_library (_poco_redis ${SRCS}) - add_library (Poco::Redis ALIAS _poco_redis) +add_library (_poco_redis ${SRCS}) +add_library (Poco::Redis ALIAS _poco_redis) - if (COMPILER_GCC) - target_compile_options (_poco_redis PRIVATE -Wno-deprecated-copy) - endif () - target_compile_options (_poco_redis PRIVATE -Wno-shadow) - target_include_directories (_poco_redis SYSTEM PUBLIC "${LIBRARY_DIR}/Redis/include") - target_link_libraries (_poco_redis PUBLIC Poco::Net) -else () - add_library (Poco::Redis UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_REDIS PocoRedis) - find_path (INCLUDE_POCO_REDIS Poco/Redis/Redis.h) - set_target_properties (Poco::Redis PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_REDIS}) - set_target_properties (Poco::Redis PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_REDIS}) - - target_link_libraries (Poco::Redis INTERFACE Poco::Net) - - message (STATUS "Using Poco::Redis: ${LIBRARY_POCO_REDIS} ${INCLUDE_POCO_REDIS}") +if (COMPILER_GCC) + target_compile_options (_poco_redis PRIVATE -Wno-deprecated-copy) endif () +target_compile_options (_poco_redis PRIVATE -Wno-shadow) +target_include_directories (_poco_redis SYSTEM PUBLIC "${LIBRARY_DIR}/Redis/include") +target_link_libraries (_poco_redis PUBLIC Poco::Net) diff --git a/contrib/poco-cmake/Util/CMakeLists.txt b/contrib/poco-cmake/Util/CMakeLists.txt index e233e65cfea..dc355e47658 100644 --- a/contrib/poco-cmake/Util/CMakeLists.txt +++ 
b/contrib/poco-cmake/Util/CMakeLists.txt @@ -1,46 +1,35 @@ -if (USE_INTERNAL_POCO_LIBRARY) - set (SRCS - "${LIBRARY_DIR}/Util/src/AbstractConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/Application.cpp" - "${LIBRARY_DIR}/Util/src/ConfigurationMapper.cpp" - "${LIBRARY_DIR}/Util/src/ConfigurationView.cpp" - "${LIBRARY_DIR}/Util/src/FilesystemConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/HelpFormatter.cpp" - "${LIBRARY_DIR}/Util/src/IniFileConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/IntValidator.cpp" - "${LIBRARY_DIR}/Util/src/JSONConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/LayeredConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/LoggingConfigurator.cpp" - "${LIBRARY_DIR}/Util/src/LoggingSubsystem.cpp" - "${LIBRARY_DIR}/Util/src/MapConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/Option.cpp" - "${LIBRARY_DIR}/Util/src/OptionCallback.cpp" - "${LIBRARY_DIR}/Util/src/OptionException.cpp" - "${LIBRARY_DIR}/Util/src/OptionProcessor.cpp" - "${LIBRARY_DIR}/Util/src/OptionSet.cpp" - "${LIBRARY_DIR}/Util/src/PropertyFileConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/RegExpValidator.cpp" - "${LIBRARY_DIR}/Util/src/ServerApplication.cpp" - "${LIBRARY_DIR}/Util/src/Subsystem.cpp" - "${LIBRARY_DIR}/Util/src/SystemConfiguration.cpp" - "${LIBRARY_DIR}/Util/src/Timer.cpp" - "${LIBRARY_DIR}/Util/src/TimerTask.cpp" - "${LIBRARY_DIR}/Util/src/Validator.cpp" - "${LIBRARY_DIR}/Util/src/XMLConfiguration.cpp" - ) +set (SRCS + "${LIBRARY_DIR}/Util/src/AbstractConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/Application.cpp" + "${LIBRARY_DIR}/Util/src/ConfigurationMapper.cpp" + "${LIBRARY_DIR}/Util/src/ConfigurationView.cpp" + "${LIBRARY_DIR}/Util/src/FilesystemConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/HelpFormatter.cpp" + "${LIBRARY_DIR}/Util/src/IniFileConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/IntValidator.cpp" + "${LIBRARY_DIR}/Util/src/JSONConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/LayeredConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/LoggingConfigurator.cpp" + "${LIBRARY_DIR}/Util/src/LoggingSubsystem.cpp" + "${LIBRARY_DIR}/Util/src/MapConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/Option.cpp" + "${LIBRARY_DIR}/Util/src/OptionCallback.cpp" + "${LIBRARY_DIR}/Util/src/OptionException.cpp" + "${LIBRARY_DIR}/Util/src/OptionProcessor.cpp" + "${LIBRARY_DIR}/Util/src/OptionSet.cpp" + "${LIBRARY_DIR}/Util/src/PropertyFileConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/RegExpValidator.cpp" + "${LIBRARY_DIR}/Util/src/ServerApplication.cpp" + "${LIBRARY_DIR}/Util/src/Subsystem.cpp" + "${LIBRARY_DIR}/Util/src/SystemConfiguration.cpp" + "${LIBRARY_DIR}/Util/src/Timer.cpp" + "${LIBRARY_DIR}/Util/src/TimerTask.cpp" + "${LIBRARY_DIR}/Util/src/Validator.cpp" + "${LIBRARY_DIR}/Util/src/XMLConfiguration.cpp" +) - add_library (_poco_util ${SRCS}) - add_library (Poco::Util ALIAS _poco_util) +add_library (_poco_util ${SRCS}) +add_library (Poco::Util ALIAS _poco_util) - target_include_directories (_poco_util SYSTEM PUBLIC "${LIBRARY_DIR}/Util/include") - target_link_libraries (_poco_util PUBLIC Poco::JSON Poco::XML) -else () - add_library (Poco::Util UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_UTIL PocoUtil) - find_path (INCLUDE_POCO_UTIL Poco/Util/Util.h) - set_target_properties (Poco::Util PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_UTIL}) - set_target_properties (Poco::Util PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_UTIL}) - - message (STATUS "Using Poco::Util: ${LIBRARY_POCO_UTIL} ${INCLUDE_POCO_UTIL}") -endif () +target_include_directories (_poco_util SYSTEM PUBLIC "${LIBRARY_DIR}/Util/include") 
+target_link_libraries (_poco_util PUBLIC Poco::JSON Poco::XML) diff --git a/contrib/poco-cmake/XML/CMakeLists.txt b/contrib/poco-cmake/XML/CMakeLists.txt index af801a65f03..45100f11eb7 100644 --- a/contrib/poco-cmake/XML/CMakeLists.txt +++ b/contrib/poco-cmake/XML/CMakeLists.txt @@ -1,110 +1,99 @@ -if (USE_INTERNAL_POCO_LIBRARY) - # Poco::XML (expat) +# Poco::XML (expat) - set (SRCS_EXPAT - "${LIBRARY_DIR}/XML/src/xmlrole.c" - "${LIBRARY_DIR}/XML/src/xmltok_impl.c" - "${LIBRARY_DIR}/XML/src/xmltok_ns.c" - "${LIBRARY_DIR}/XML/src/xmltok.c" - ) +set (SRCS_EXPAT + "${LIBRARY_DIR}/XML/src/xmlrole.c" + "${LIBRARY_DIR}/XML/src/xmltok_impl.c" + "${LIBRARY_DIR}/XML/src/xmltok_ns.c" + "${LIBRARY_DIR}/XML/src/xmltok.c" +) - add_library (_poco_xml_expat ${SRCS_EXPAT}) - add_library (Poco::XML::Expat ALIAS _poco_xml_expat) +add_library (_poco_xml_expat ${SRCS_EXPAT}) +add_library (Poco::XML::Expat ALIAS _poco_xml_expat) - target_include_directories (_poco_xml_expat PUBLIC "${LIBRARY_DIR}/XML/include") +target_include_directories (_poco_xml_expat PUBLIC "${LIBRARY_DIR}/XML/include") - # Poco::XML +# Poco::XML - set (SRCS - "${LIBRARY_DIR}/XML/src/AbstractContainerNode.cpp" - "${LIBRARY_DIR}/XML/src/AbstractNode.cpp" - "${LIBRARY_DIR}/XML/src/Attr.cpp" - "${LIBRARY_DIR}/XML/src/Attributes.cpp" - "${LIBRARY_DIR}/XML/src/AttributesImpl.cpp" - "${LIBRARY_DIR}/XML/src/AttrMap.cpp" - "${LIBRARY_DIR}/XML/src/CDATASection.cpp" - "${LIBRARY_DIR}/XML/src/CharacterData.cpp" - "${LIBRARY_DIR}/XML/src/ChildNodesList.cpp" - "${LIBRARY_DIR}/XML/src/Comment.cpp" - "${LIBRARY_DIR}/XML/src/ContentHandler.cpp" - "${LIBRARY_DIR}/XML/src/DeclHandler.cpp" - "${LIBRARY_DIR}/XML/src/DefaultHandler.cpp" - "${LIBRARY_DIR}/XML/src/Document.cpp" - "${LIBRARY_DIR}/XML/src/DocumentEvent.cpp" - "${LIBRARY_DIR}/XML/src/DocumentFragment.cpp" - "${LIBRARY_DIR}/XML/src/DocumentType.cpp" - "${LIBRARY_DIR}/XML/src/DOMBuilder.cpp" - "${LIBRARY_DIR}/XML/src/DOMException.cpp" - "${LIBRARY_DIR}/XML/src/DOMImplementation.cpp" - "${LIBRARY_DIR}/XML/src/DOMObject.cpp" - "${LIBRARY_DIR}/XML/src/DOMParser.cpp" - "${LIBRARY_DIR}/XML/src/DOMSerializer.cpp" - "${LIBRARY_DIR}/XML/src/DOMWriter.cpp" - "${LIBRARY_DIR}/XML/src/DTDHandler.cpp" - "${LIBRARY_DIR}/XML/src/DTDMap.cpp" - "${LIBRARY_DIR}/XML/src/Element.cpp" - "${LIBRARY_DIR}/XML/src/ElementsByTagNameList.cpp" - "${LIBRARY_DIR}/XML/src/Entity.cpp" - "${LIBRARY_DIR}/XML/src/EntityReference.cpp" - "${LIBRARY_DIR}/XML/src/EntityResolver.cpp" - "${LIBRARY_DIR}/XML/src/EntityResolverImpl.cpp" - "${LIBRARY_DIR}/XML/src/ErrorHandler.cpp" - "${LIBRARY_DIR}/XML/src/Event.cpp" - "${LIBRARY_DIR}/XML/src/EventDispatcher.cpp" - "${LIBRARY_DIR}/XML/src/EventException.cpp" - "${LIBRARY_DIR}/XML/src/EventListener.cpp" - "${LIBRARY_DIR}/XML/src/EventTarget.cpp" - "${LIBRARY_DIR}/XML/src/InputSource.cpp" - "${LIBRARY_DIR}/XML/src/LexicalHandler.cpp" - "${LIBRARY_DIR}/XML/src/Locator.cpp" - "${LIBRARY_DIR}/XML/src/LocatorImpl.cpp" - "${LIBRARY_DIR}/XML/src/MutationEvent.cpp" - "${LIBRARY_DIR}/XML/src/Name.cpp" - "${LIBRARY_DIR}/XML/src/NamedNodeMap.cpp" - "${LIBRARY_DIR}/XML/src/NamePool.cpp" - "${LIBRARY_DIR}/XML/src/NamespaceStrategy.cpp" - "${LIBRARY_DIR}/XML/src/NamespaceSupport.cpp" - "${LIBRARY_DIR}/XML/src/Node.cpp" - "${LIBRARY_DIR}/XML/src/NodeAppender.cpp" - "${LIBRARY_DIR}/XML/src/NodeFilter.cpp" - "${LIBRARY_DIR}/XML/src/NodeIterator.cpp" - "${LIBRARY_DIR}/XML/src/NodeList.cpp" - "${LIBRARY_DIR}/XML/src/Notation.cpp" - "${LIBRARY_DIR}/XML/src/ParserEngine.cpp" - 
"${LIBRARY_DIR}/XML/src/ProcessingInstruction.cpp" - "${LIBRARY_DIR}/XML/src/QName.cpp" - "${LIBRARY_DIR}/XML/src/SAXException.cpp" - "${LIBRARY_DIR}/XML/src/SAXParser.cpp" - "${LIBRARY_DIR}/XML/src/Text.cpp" - "${LIBRARY_DIR}/XML/src/TreeWalker.cpp" - "${LIBRARY_DIR}/XML/src/ValueTraits.cpp" - "${LIBRARY_DIR}/XML/src/WhitespaceFilter.cpp" - "${LIBRARY_DIR}/XML/src/XMLException.cpp" - "${LIBRARY_DIR}/XML/src/XMLFilter.cpp" - "${LIBRARY_DIR}/XML/src/XMLFilterImpl.cpp" - "${LIBRARY_DIR}/XML/src/XMLReader.cpp" - "${LIBRARY_DIR}/XML/src/XMLStreamParser.cpp" - "${LIBRARY_DIR}/XML/src/XMLStreamParserException.cpp" - "${LIBRARY_DIR}/XML/src/XMLString.cpp" - "${LIBRARY_DIR}/XML/src/XMLWriter.cpp" +set (SRCS + "${LIBRARY_DIR}/XML/src/AbstractContainerNode.cpp" + "${LIBRARY_DIR}/XML/src/AbstractNode.cpp" + "${LIBRARY_DIR}/XML/src/Attr.cpp" + "${LIBRARY_DIR}/XML/src/Attributes.cpp" + "${LIBRARY_DIR}/XML/src/AttributesImpl.cpp" + "${LIBRARY_DIR}/XML/src/AttrMap.cpp" + "${LIBRARY_DIR}/XML/src/CDATASection.cpp" + "${LIBRARY_DIR}/XML/src/CharacterData.cpp" + "${LIBRARY_DIR}/XML/src/ChildNodesList.cpp" + "${LIBRARY_DIR}/XML/src/Comment.cpp" + "${LIBRARY_DIR}/XML/src/ContentHandler.cpp" + "${LIBRARY_DIR}/XML/src/DeclHandler.cpp" + "${LIBRARY_DIR}/XML/src/DefaultHandler.cpp" + "${LIBRARY_DIR}/XML/src/Document.cpp" + "${LIBRARY_DIR}/XML/src/DocumentEvent.cpp" + "${LIBRARY_DIR}/XML/src/DocumentFragment.cpp" + "${LIBRARY_DIR}/XML/src/DocumentType.cpp" + "${LIBRARY_DIR}/XML/src/DOMBuilder.cpp" + "${LIBRARY_DIR}/XML/src/DOMException.cpp" + "${LIBRARY_DIR}/XML/src/DOMImplementation.cpp" + "${LIBRARY_DIR}/XML/src/DOMObject.cpp" + "${LIBRARY_DIR}/XML/src/DOMParser.cpp" + "${LIBRARY_DIR}/XML/src/DOMSerializer.cpp" + "${LIBRARY_DIR}/XML/src/DOMWriter.cpp" + "${LIBRARY_DIR}/XML/src/DTDHandler.cpp" + "${LIBRARY_DIR}/XML/src/DTDMap.cpp" + "${LIBRARY_DIR}/XML/src/Element.cpp" + "${LIBRARY_DIR}/XML/src/ElementsByTagNameList.cpp" + "${LIBRARY_DIR}/XML/src/Entity.cpp" + "${LIBRARY_DIR}/XML/src/EntityReference.cpp" + "${LIBRARY_DIR}/XML/src/EntityResolver.cpp" + "${LIBRARY_DIR}/XML/src/EntityResolverImpl.cpp" + "${LIBRARY_DIR}/XML/src/ErrorHandler.cpp" + "${LIBRARY_DIR}/XML/src/Event.cpp" + "${LIBRARY_DIR}/XML/src/EventDispatcher.cpp" + "${LIBRARY_DIR}/XML/src/EventException.cpp" + "${LIBRARY_DIR}/XML/src/EventListener.cpp" + "${LIBRARY_DIR}/XML/src/EventTarget.cpp" + "${LIBRARY_DIR}/XML/src/InputSource.cpp" + "${LIBRARY_DIR}/XML/src/LexicalHandler.cpp" + "${LIBRARY_DIR}/XML/src/Locator.cpp" + "${LIBRARY_DIR}/XML/src/LocatorImpl.cpp" + "${LIBRARY_DIR}/XML/src/MutationEvent.cpp" + "${LIBRARY_DIR}/XML/src/Name.cpp" + "${LIBRARY_DIR}/XML/src/NamedNodeMap.cpp" + "${LIBRARY_DIR}/XML/src/NamePool.cpp" + "${LIBRARY_DIR}/XML/src/NamespaceStrategy.cpp" + "${LIBRARY_DIR}/XML/src/NamespaceSupport.cpp" + "${LIBRARY_DIR}/XML/src/Node.cpp" + "${LIBRARY_DIR}/XML/src/NodeAppender.cpp" + "${LIBRARY_DIR}/XML/src/NodeFilter.cpp" + "${LIBRARY_DIR}/XML/src/NodeIterator.cpp" + "${LIBRARY_DIR}/XML/src/NodeList.cpp" + "${LIBRARY_DIR}/XML/src/Notation.cpp" + "${LIBRARY_DIR}/XML/src/ParserEngine.cpp" + "${LIBRARY_DIR}/XML/src/ProcessingInstruction.cpp" + "${LIBRARY_DIR}/XML/src/QName.cpp" + "${LIBRARY_DIR}/XML/src/SAXException.cpp" + "${LIBRARY_DIR}/XML/src/SAXParser.cpp" + "${LIBRARY_DIR}/XML/src/Text.cpp" + "${LIBRARY_DIR}/XML/src/TreeWalker.cpp" + "${LIBRARY_DIR}/XML/src/ValueTraits.cpp" + "${LIBRARY_DIR}/XML/src/WhitespaceFilter.cpp" + "${LIBRARY_DIR}/XML/src/XMLException.cpp" + "${LIBRARY_DIR}/XML/src/XMLFilter.cpp" + 
"${LIBRARY_DIR}/XML/src/XMLFilterImpl.cpp" + "${LIBRARY_DIR}/XML/src/XMLReader.cpp" + "${LIBRARY_DIR}/XML/src/XMLStreamParser.cpp" + "${LIBRARY_DIR}/XML/src/XMLStreamParserException.cpp" + "${LIBRARY_DIR}/XML/src/XMLString.cpp" + "${LIBRARY_DIR}/XML/src/XMLWriter.cpp" - # expat - "${LIBRARY_DIR}/XML/src/xmlparse.cpp" - ) + # expat + "${LIBRARY_DIR}/XML/src/xmlparse.cpp" +) - add_library (_poco_xml ${SRCS}) - add_library (Poco::XML ALIAS _poco_xml) +add_library (_poco_xml ${SRCS}) +add_library (Poco::XML ALIAS _poco_xml) - target_compile_options (_poco_xml PRIVATE -Wno-old-style-cast) - target_include_directories (_poco_xml SYSTEM PUBLIC "${LIBRARY_DIR}/XML/include") - target_link_libraries (_poco_xml PUBLIC Poco::Foundation Poco::XML::Expat) -else () - add_library (Poco::XML UNKNOWN IMPORTED GLOBAL) - - find_library (LIBRARY_POCO_XML PocoXML) - find_path (INCLUDE_POCO_XML Poco/XML/XML.h) - set_target_properties (Poco::XML PROPERTIES IMPORTED_LOCATION ${LIBRARY_POCO_XML}) - set_target_properties (Poco::XML PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_POCO_XML}) - - message (STATUS "Using Poco::XML: ${LIBRARY_POCO_XML} ${INCLUDE_POCO_XML}") -endif () +target_compile_options (_poco_xml PRIVATE -Wno-old-style-cast) +target_include_directories (_poco_xml SYSTEM PUBLIC "${LIBRARY_DIR}/XML/include") +target_link_libraries (_poco_xml PUBLIC Poco::Foundation Poco::XML::Expat) diff --git a/contrib/protobuf-cmake/CMakeLists.txt b/contrib/protobuf-cmake/CMakeLists.txt index 92eec444e44..5e22136fc1f 100644 --- a/contrib/protobuf-cmake/CMakeLists.txt +++ b/contrib/protobuf-cmake/CMakeLists.txt @@ -1,3 +1,22 @@ +option(ENABLE_PROTOBUF "Enable protobuf" ${ENABLE_LIBRARIES}) + +if(NOT ENABLE_PROTOBUF) + message(STATUS "Not using protobuf") + return() +endif() + +set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src") +if(OS_FREEBSD AND SANITIZE STREQUAL "address") + # ../contrib/protobuf/src/google/protobuf/arena_impl.h:45:10: fatal error: 'sanitizer/asan_interface.h' file not found + # #include + if(LLVM_INCLUDE_DIRS) + set(Protobuf_INCLUDE_DIR "${Protobuf_INCLUDE_DIR}" ${LLVM_INCLUDE_DIRS}) + else() + message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use protobuf on FreeBSD with address sanitizer without LLVM") + return() + endif() +endif() + set(protobuf_source_dir "${ClickHouse_SOURCE_DIR}/contrib/protobuf") set(protobuf_binary_dir "${ClickHouse_BINARY_DIR}/contrib/protobuf") @@ -8,7 +27,6 @@ add_definitions(-DHAVE_PTHREAD) add_definitions(-DHAVE_ZLIB) include_directories( - ${ZLIB_INCLUDE_DIRECTORIES} ${protobuf_binary_dir} ${protobuf_source_dir}/src) @@ -45,13 +63,13 @@ set(libprotobuf_lite_files ${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc ) -add_library(libprotobuf-lite ${libprotobuf_lite_files}) -target_link_libraries(libprotobuf-lite pthread) +add_library(_libprotobuf-lite ${libprotobuf_lite_files}) +target_link_libraries(_libprotobuf-lite pthread) if(${CMAKE_SYSTEM_NAME} STREQUAL "Android") - target_link_libraries(libprotobuf-lite log) + target_link_libraries(_libprotobuf-lite log) endif() -target_include_directories(libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src) -add_library(protobuf::libprotobuf-lite ALIAS libprotobuf-lite) +target_include_directories(_libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src) +add_library(protobuf::libprotobuf-lite ALIAS _libprotobuf-lite) set(libprotobuf_files @@ -109,17 +127,17 @@ set(libprotobuf_files ${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc ) -add_library(libprotobuf 
${libprotobuf_lite_files} ${libprotobuf_files}) +add_library(_libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files}) if (ENABLE_FUZZING) - target_compile_options(libprotobuf PRIVATE "-fsanitize-recover=all") + target_compile_options(_libprotobuf PRIVATE "-fsanitize-recover=all") endif() -target_link_libraries(libprotobuf pthread) -target_link_libraries(libprotobuf ${ZLIB_LIBRARIES}) +target_link_libraries(_libprotobuf pthread) +target_link_libraries(_libprotobuf ch_contrib::zlib) if(${CMAKE_SYSTEM_NAME} STREQUAL "Android") - target_link_libraries(libprotobuf log) + target_link_libraries(_libprotobuf log) endif() -target_include_directories(libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src) -add_library(protobuf::libprotobuf ALIAS libprotobuf) +target_include_directories(_libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src) +add_library(protobuf::libprotobuf ALIAS _libprotobuf) set(libprotoc_files @@ -208,9 +226,9 @@ set(libprotoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc ) -add_library(libprotoc ${libprotoc_files}) -target_link_libraries(libprotoc libprotobuf) -add_library(protobuf::libprotoc ALIAS libprotoc) +add_library(_libprotoc ${libprotoc_files}) +target_link_libraries(_libprotoc _libprotobuf) +add_library(protobuf::libprotoc ALIAS _libprotoc) set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc) @@ -218,7 +236,7 @@ if (CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR) add_executable(protoc ${protoc_files}) - target_link_libraries(protoc libprotoc libprotobuf pthread) + target_link_libraries(protoc _libprotoc _libprotobuf pthread) add_executable(protobuf::protoc ALIAS protoc) if (ENABLE_FUZZING) @@ -297,3 +315,15 @@ else () set_target_properties (protoc PROPERTIES IMPORTED_LOCATION "${PROTOC_BUILD_DIR}/protoc") add_dependencies(protoc "${PROTOC_BUILD_DIR}/protoc") endif () + +include("${ClickHouse_SOURCE_DIR}/contrib/protobuf-cmake/protobuf_generate.cmake") + +add_library(_protobuf INTERFACE) +target_link_libraries(_protobuf INTERFACE _libprotobuf) +target_include_directories(_protobuf INTERFACE "${Protobuf_INCLUDE_DIR}") +add_library(ch_contrib::protobuf ALIAS _protobuf) + +add_library(_protoc INTERFACE) +target_link_libraries(_protoc INTERFACE _libprotoc _libprotobuf) +target_include_directories(_protoc INTERFACE "${Protobuf_INCLUDE_DIR}") +add_library(ch_contrib::protoc ALIAS _protoc) diff --git a/contrib/rapidjson-cmake/CMakeLists.txt b/contrib/rapidjson-cmake/CMakeLists.txt new file mode 100644 index 00000000000..0d7ba74a399 --- /dev/null +++ b/contrib/rapidjson-cmake/CMakeLists.txt @@ -0,0 +1,11 @@ +option(ENABLE_RAPIDJSON "Use rapidjson" ${ENABLE_LIBRARIES}) + +if(NOT ENABLE_RAPIDJSON) + message(STATUS "Not using rapidjson") + return() +endif() + +set(RAPIDJSON_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include") +add_library(_rapidjson INTERFACE) +target_include_directories(_rapidjson SYSTEM BEFORE INTERFACE ${RAPIDJSON_INCLUDE_DIR}) +add_library(ch_contrib::rapidjson ALIAS _rapidjson) diff --git a/contrib/re2-cmake/CMakeLists.txt b/contrib/re2-cmake/CMakeLists.txt index ff8b3c43472..e74f488643d 100644 --- a/contrib/re2-cmake/CMakeLists.txt +++ b/contrib/re2-cmake/CMakeLists.txt @@ -4,6 +4,11 @@ # This file was edited for ClickHouse +string(FIND ${CMAKE_CURRENT_BINARY_DIR} " " _have_space) +if(_have_space GREATER 0) + message(FATAL_ERROR "Using spaces in build path [${CMAKE_CURRENT_BINARY_DIR}] highly not recommended. 
Library re2st will be disabled.") +endif() + set(SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2") set(RE2_SOURCES @@ -30,11 +35,9 @@ set(RE2_SOURCES ${SRC_DIR}/util/rune.cc ${SRC_DIR}/util/strutil.cc ) - add_library(re2 ${RE2_SOURCES}) target_include_directories(re2 PUBLIC "${SRC_DIR}") - # Building re2 which is thread-safe and re2_st which is not. # re2 changes its state during matching of regular expression, e.g. creates temporary DFA. # It uses RWLock to process the same regular expression object from different threads. @@ -43,7 +46,8 @@ target_include_directories(re2 PUBLIC "${SRC_DIR}") add_library(re2_st ${RE2_SOURCES}) target_compile_definitions (re2_st PRIVATE NDEBUG NO_THREADS re2=re2_st) target_include_directories (re2_st PRIVATE .) -target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${SRC_DIR}) +target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR}) +target_include_directories (re2_st SYSTEM BEFORE PUBLIC ${SRC_DIR}) file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/re2_st) foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h) @@ -66,3 +70,8 @@ foreach (FILENAME mutex.h) add_custom_target (transform_${FILENAME} DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}") add_dependencies (re2_st transform_${FILENAME}) endforeach () + +# NOTE: you should not change name of library here, since it is used for PVS +# (see docker/test/pvs/Dockerfile), to generate required header (see above) +add_library(ch_contrib::re2 ALIAS re2) +add_library(ch_contrib::re2_st ALIAS re2_st) diff --git a/contrib/replxx-cmake/CMakeLists.txt b/contrib/replxx-cmake/CMakeLists.txt index 222a38095cb..8487ad520bc 100644 --- a/contrib/replxx-cmake/CMakeLists.txt +++ b/contrib/replxx-cmake/CMakeLists.txt @@ -1,74 +1,30 @@ option (ENABLE_REPLXX "Enable replxx support" ${ENABLE_LIBRARIES}) if (NOT ENABLE_REPLXX) - if (USE_INTERNAL_REPLXX_LIBRARY) - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal replxx with ENABLE_REPLXX=OFF") - endif() - - add_library(replxx INTERFACE) - target_compile_definitions(replxx INTERFACE USE_REPLXX=0) - message (STATUS "Not using replxx") return() endif() -option (USE_INTERNAL_REPLXX_LIBRARY "Use internal replxx library (Experimental: set to OFF on your own risk)" ON) +set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/replxx") -if (NOT USE_INTERNAL_REPLXX_LIBRARY) - find_library(LIBRARY_REPLXX NAMES replxx replxx-static) - find_path(INCLUDE_REPLXX replxx.hxx) +set(SRCS + "${LIBRARY_DIR}/src/conversion.cxx" + "${LIBRARY_DIR}/src/ConvertUTF.cpp" + "${LIBRARY_DIR}/src/escape.cxx" + "${LIBRARY_DIR}/src/history.cxx" + "${LIBRARY_DIR}/src/terminal.cxx" + "${LIBRARY_DIR}/src/prompt.cxx" + "${LIBRARY_DIR}/src/replxx_impl.cxx" + "${LIBRARY_DIR}/src/replxx.cxx" + "${LIBRARY_DIR}/src/util.cxx" + "${LIBRARY_DIR}/src/wcwidth.cpp" +) - if (LIBRARY_REPLXX AND INCLUDE_REPLXX) - set(CMAKE_REQUIRED_LIBRARIES ${LIBRARY_REPLXX}) - set(CMAKE_REQUIRED_INCLUDES ${INCLUDE_REPLXX}) - check_cxx_source_compiles( - " - #include - int main() { - replxx::Replxx rx; - } - " - EXTERNAL_REPLXX_WORKS - ) - - if (NOT EXTERNAL_REPLXX_WORKS) - message (${RECONFIGURE_MESSAGE_LEVEL} "replxx is unusable: ${LIBRARY_REPLXX} ${INCLUDE_REPLXX}") - else() - add_library(replxx UNKNOWN IMPORTED) - set_property(TARGET replxx PROPERTY IMPORTED_LOCATION ${LIBRARY_REPLXX}) - target_include_directories(replxx SYSTEM PUBLIC ${INCLUDE_REPLXX}) - endif() - else() - message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system replxx") - endif() -endif() - - -if (NOT LIBRARY_REPLXX OR 
NOT INCLUDE_REPLXX OR NOT EXTERNAL_REPLXX_WORKS) - set(USE_INTERNAL_REPLXX_LIBRARY 1) - set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/replxx") - - set(SRCS - "${LIBRARY_DIR}/src/conversion.cxx" - "${LIBRARY_DIR}/src/ConvertUTF.cpp" - "${LIBRARY_DIR}/src/escape.cxx" - "${LIBRARY_DIR}/src/history.cxx" - "${LIBRARY_DIR}/src/terminal.cxx" - "${LIBRARY_DIR}/src/prompt.cxx" - "${LIBRARY_DIR}/src/replxx_impl.cxx" - "${LIBRARY_DIR}/src/replxx.cxx" - "${LIBRARY_DIR}/src/util.cxx" - "${LIBRARY_DIR}/src/wcwidth.cpp" - ) - - add_library (replxx ${SRCS}) - target_include_directories(replxx SYSTEM PUBLIC "${LIBRARY_DIR}/include") -endif () +add_library (_replxx ${SRCS}) +target_include_directories(_replxx SYSTEM PUBLIC "${LIBRARY_DIR}/include") if (COMPILER_CLANG) - target_compile_options(replxx PRIVATE -Wno-documentation) + target_compile_options(_replxx PRIVATE -Wno-documentation) endif () -target_compile_definitions(replxx PUBLIC USE_REPLXX=1) - -message (STATUS "Using replxx") +add_library(ch_contrib::replxx ALIAS _replxx) diff --git a/contrib/rocksdb-cmake/CMakeLists.txt b/contrib/rocksdb-cmake/CMakeLists.txt index db0b3942b79..902d29a9630 100644 --- a/contrib/rocksdb-cmake/CMakeLists.txt +++ b/contrib/rocksdb-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option (ENABLE_ROCKSDB "Enable rocksdb library" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_ROCKSDB) + message (STATUS "Not using rocksdb") + return() +endif() + ## this file is extracted from `contrib/rocksdb/CMakeLists.txt` set(ROCKSDB_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/rocksdb") list(APPEND CMAKE_MODULE_PATH "${ROCKSDB_SOURCE_DIR}/cmake/modules/") @@ -6,6 +13,10 @@ set(PORTABLE ON) ## always disable jemalloc for rocksdb by default ## because it introduces non-standard jemalloc APIs option(WITH_JEMALLOC "build with JeMalloc" OFF) +set(USE_SNAPPY OFF) +if (TARGET ch_contrib::snappy) + set(USE_SNAPPY ON) +endif() option(WITH_SNAPPY "build with SNAPPY" ${USE_SNAPPY}) ## lz4, zlib, zstd is enabled in ClickHouse by default option(WITH_LZ4 "build with lz4" ON) @@ -38,25 +49,25 @@ else() # but it does not have all the jemalloc files in include/... 
set(WITH_JEMALLOC ON) else() - if(WITH_JEMALLOC) + if(WITH_JEMALLOC AND TARGET ch_contrib::jemalloc) add_definitions(-DROCKSDB_JEMALLOC -DJEMALLOC_NO_DEMANGLE) - list(APPEND THIRDPARTY_LIBS jemalloc) + list(APPEND THIRDPARTY_LIBS ch_contrib::jemalloc) endif() endif() if(WITH_SNAPPY) add_definitions(-DSNAPPY) - list(APPEND THIRDPARTY_LIBS snappy) + list(APPEND THIRDPARTY_LIBS ch_contrib::snappy) endif() if(WITH_ZLIB) add_definitions(-DZLIB) - list(APPEND THIRDPARTY_LIBS zlib) + list(APPEND THIRDPARTY_LIBS ch_contrib::zlib) endif() if(WITH_LZ4) add_definitions(-DLZ4) - list(APPEND THIRDPARTY_LIBS lz4) + list(APPEND THIRDPARTY_LIBS ch_contrib::lz4) endif() if(WITH_ZSTD) @@ -66,7 +77,7 @@ else() include_directories("${ZSTD_INCLUDE_DIR}/dictBuilder") include_directories("${ZSTD_INCLUDE_DIR}/deprecated") - list(APPEND THIRDPARTY_LIBS zstd) + list(APPEND THIRDPARTY_LIBS ch_contrib::zstd) endif() endif() @@ -538,8 +549,8 @@ if(WITH_FOLLY_DISTRIBUTED_MUTEX) "${ROCKSDB_SOURCE_DIR}/third-party/folly/folly/synchronization/WaitOptions.cpp") endif() -set(ROCKSDB_STATIC_LIB rocksdb) - -add_library(${ROCKSDB_STATIC_LIB} STATIC ${SOURCES}) -target_link_libraries(${ROCKSDB_STATIC_LIB} PRIVATE - ${THIRDPARTY_LIBS} ${SYSTEM_LIBS}) +add_library(_rocksdb STATIC ${SOURCES}) +add_library(ch_contrib::rocksdb ALIAS _rocksdb) +target_link_libraries(_rocksdb PRIVATE ${THIRDPARTY_LIBS} ${SYSTEM_LIBS}) +# SYSTEM is required to overcome some issues +target_include_directories(_rocksdb SYSTEM BEFORE INTERFACE "${ROCKSDB_SOURCE_DIR}/include") diff --git a/contrib/s2geometry-cmake/CMakeLists.txt b/contrib/s2geometry-cmake/CMakeLists.txt index e2b0f20f408..49c80e45b18 100644 --- a/contrib/s2geometry-cmake/CMakeLists.txt +++ b/contrib/s2geometry-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option(ENABLE_S2_GEOMETRY "Enable S2 geometry library" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_S2_GEOMETRY) + message(STATUS "Not using S2 geometry") + return() +endif() + set(S2_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/s2geometry/src") set(ABSL_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/abseil-cpp") @@ -108,15 +115,17 @@ set(S2_SRCS ) -add_library(s2 ${S2_SRCS}) -set_property(TARGET s2 PROPERTY CXX_STANDARD 17) +add_library(_s2 ${S2_SRCS}) +add_library(ch_contrib::s2 ALIAS _s2) -if (OPENSSL_FOUND) - target_link_libraries(s2 PRIVATE ${OPENSSL_LIBRARIES}) +set_property(TARGET _s2 PROPERTY CXX_STANDARD 17) + +if (TARGET OpenSSL::SSL) + target_link_libraries(_s2 PRIVATE OpenSSL::Crypto OpenSSL::SSL) endif() # Copied from contrib/s2geometry/CMakeLists -target_link_libraries(s2 PRIVATE +target_link_libraries(_s2 PRIVATE absl::base absl::btree absl::config @@ -138,9 +147,9 @@ target_link_libraries(s2 PRIVATE absl::utility ) -target_include_directories(s2 SYSTEM BEFORE PUBLIC "${S2_SOURCE_DIR}/") -target_include_directories(s2 SYSTEM PUBLIC "${ABSL_SOURCE_DIR}") +target_include_directories(_s2 SYSTEM BEFORE PUBLIC "${S2_SOURCE_DIR}/") +target_include_directories(_s2 SYSTEM PUBLIC "${ABSL_SOURCE_DIR}") if(M_LIBRARY) - target_link_libraries(s2 PRIVATE ${M_LIBRARY}) + target_link_libraries(_s2 PRIVATE ${M_LIBRARY}) endif() diff --git a/contrib/sentry-native-cmake/CMakeLists.txt b/contrib/sentry-native-cmake/CMakeLists.txt index f4e946cf797..520fa176b91 100644 --- a/contrib/sentry-native-cmake/CMakeLists.txt +++ b/contrib/sentry-native-cmake/CMakeLists.txt @@ -1,3 +1,14 @@ +if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG)) + option (ENABLE_SENTRY "Enable Sentry" ${ENABLE_LIBRARIES}) +else() + option 
(ENABLE_SENTRY "Enable Sentry" OFF) +endif() + +if (NOT ENABLE_SENTRY) + message(STATUS "Not using sentry") + return() +endif() + set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native") set (SRCS @@ -38,15 +49,16 @@ else() list(APPEND SRCS ${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c) endif() -add_library(sentry ${SRCS}) -add_library(sentry::sentry ALIAS sentry) +add_library(_sentry ${SRCS}) if(BUILD_SHARED_LIBS) - target_compile_definitions(sentry PRIVATE SENTRY_BUILD_SHARED) + target_compile_definitions(_sentry PRIVATE SENTRY_BUILD_SHARED) else() - target_compile_definitions(sentry PUBLIC SENTRY_BUILD_STATIC) + target_compile_definitions(_sentry PUBLIC SENTRY_BUILD_STATIC) endif() -target_link_libraries(sentry PRIVATE curl pthread) -target_include_directories(sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src") -target_compile_definitions(sentry PRIVATE SENTRY_WITH_INPROC_BACKEND SIZEOF_LONG=8) +target_link_libraries(_sentry PRIVATE ch_contrib::curl pthread) +target_include_directories(_sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src") +target_compile_definitions(_sentry PRIVATE SENTRY_WITH_INPROC_BACKEND SIZEOF_LONG=8) + +add_library(ch_contrib::sentry ALIAS _sentry) diff --git a/contrib/simdjson-cmake/CMakeLists.txt b/contrib/simdjson-cmake/CMakeLists.txt index bb9a5844def..ab2840f5b7f 100644 --- a/contrib/simdjson-cmake/CMakeLists.txt +++ b/contrib/simdjson-cmake/CMakeLists.txt @@ -1,11 +1,20 @@ +option (ENABLE_SIMDJSON "Use simdjson" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_SIMDJSON) + message(STATUS "Not using simdjson") + return() +endif() + set(SIMDJSON_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/simdjson/include") set(SIMDJSON_SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/simdjson/src") set(SIMDJSON_SRC "${SIMDJSON_SRC_DIR}/simdjson.cpp") -add_library(simdjson ${SIMDJSON_SRC}) -target_include_directories(simdjson SYSTEM PUBLIC "${SIMDJSON_INCLUDE_DIR}" PRIVATE "${SIMDJSON_SRC_DIR}") +add_library(_simdjson ${SIMDJSON_SRC}) +target_include_directories(_simdjson SYSTEM PUBLIC "${SIMDJSON_INCLUDE_DIR}" PRIVATE "${SIMDJSON_SRC_DIR}") # simdjson is using its own CPU dispatching and get confused if we enable AVX/AVX2 flags. 
if(ARCH_AMD64) - target_compile_options(simdjson PRIVATE -mno-avx -mno-avx2) + target_compile_options(_simdjson PRIVATE -mno-avx -mno-avx2) endif() + +add_library(ch_contrib::simdjson ALIAS _simdjson) diff --git a/contrib/snappy-cmake/CMakeLists.txt b/contrib/snappy-cmake/CMakeLists.txt index 289f8908436..0997ea207e0 100644 --- a/contrib/snappy-cmake/CMakeLists.txt +++ b/contrib/snappy-cmake/CMakeLists.txt @@ -30,8 +30,9 @@ configure_file( "${SOURCE_DIR}/snappy-stubs-public.h.in" "${CMAKE_CURRENT_BINARY_DIR}/snappy-stubs-public.h") -add_library(snappy "") -target_sources(snappy +add_library(_snappy "") +add_library(ch_contrib::snappy ALIAS _snappy) +target_sources(_snappy PRIVATE "${SOURCE_DIR}/snappy-internal.h" "${SOURCE_DIR}/snappy-stubs-internal.h" @@ -40,5 +41,5 @@ target_sources(snappy "${SOURCE_DIR}/snappy-stubs-internal.cc" "${SOURCE_DIR}/snappy.cc") -target_include_directories(snappy SYSTEM PUBLIC ${SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) -target_compile_definitions(snappy PRIVATE -DHAVE_CONFIG_H) +target_include_directories(_snappy SYSTEM BEFORE PUBLIC ${SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) +target_compile_definitions(_snappy PRIVATE -DHAVE_CONFIG_H) diff --git a/contrib/sparsehash-c11-cmake/CMakeLists.txt b/contrib/sparsehash-c11-cmake/CMakeLists.txt new file mode 100644 index 00000000000..af588c9484f --- /dev/null +++ b/contrib/sparsehash-c11-cmake/CMakeLists.txt @@ -0,0 +1,3 @@ +add_library(_sparsehash INTERFACE) +target_include_directories(_sparsehash SYSTEM BEFORE INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/sparsehash-c11") +add_library(ch_contrib::sparsehash ALIAS _sparsehash) diff --git a/contrib/sqlite-cmake/CMakeLists.txt b/contrib/sqlite-cmake/CMakeLists.txt index 495cb63798d..7559dd4c184 100644 --- a/contrib/sqlite-cmake/CMakeLists.txt +++ b/contrib/sqlite-cmake/CMakeLists.txt @@ -1,6 +1,14 @@ +option(ENABLE_SQLITE "Enable sqlite" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_SQLITE) + message(STATUS "Not using sqlite") + return() +endif() + set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/sqlite-amalgamation") set(SRCS ${LIBRARY_DIR}/sqlite3.c) -add_library(sqlite ${SRCS}) -target_include_directories(sqlite SYSTEM PUBLIC "${LIBRARY_DIR}") +add_library(_sqlite ${SRCS}) +target_include_directories(_sqlite SYSTEM PUBLIC "${LIBRARY_DIR}") +add_library(ch_contrib::sqlite ALIAS _sqlite) diff --git a/contrib/stats-cmake/CMakeLists.txt b/contrib/stats-cmake/CMakeLists.txt deleted file mode 100644 index 8279e49c3f0..00000000000 --- a/contrib/stats-cmake/CMakeLists.txt +++ /dev/null @@ -1,9 +0,0 @@ -# The stats is a header-only library of probability density functions, -# cumulative distribution functions, quantile functions, and random sampling methods. 
-set(STATS_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/stats/include") -set(GCEM_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/gcem/include") - -add_library(stats INTERFACE) - -target_include_directories(stats SYSTEM INTERFACE ${STATS_INCLUDE_DIR}) -target_include_directories(stats SYSTEM INTERFACE ${GCEM_INCLUDE_DIR}) diff --git a/contrib/thrift-cmake/CMakeLists.txt b/contrib/thrift-cmake/CMakeLists.txt index 088dd0a969b..2a62a6fe7ab 100644 --- a/contrib/thrift-cmake/CMakeLists.txt +++ b/contrib/thrift-cmake/CMakeLists.txt @@ -1,3 +1,10 @@ +option(ENABLE_THRIFT "Enable Thrift" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_THRIFT) + message (STATUS "thrift disabled") + return() +endif() + set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/thrift/lib/cpp") set(thriftcpp_SOURCES "${LIBRARY_DIR}/src/thrift/TApplicationException.cpp" @@ -82,6 +89,7 @@ configure_file("${CMAKE_CURRENT_SOURCE_DIR}/build/cmake/config.h.in" "${CMAKE_CU include_directories("${CMAKE_CURRENT_BINARY_DIR}") -add_library(${THRIFT_LIBRARY} ${thriftcpp_SOURCES} ${thriftcpp_threads_SOURCES}) -target_include_directories(${THRIFT_LIBRARY} SYSTEM PUBLIC "${THRIFT_INCLUDE_DIR}" ${CMAKE_CURRENT_BINARY_DIR}) -target_link_libraries (${THRIFT_LIBRARY} PUBLIC boost::headers_only) +add_library(_thrift ${thriftcpp_SOURCES} ${thriftcpp_threads_SOURCES}) +add_library(ch_contrib::thrift ALIAS _thrift) +target_include_directories(_thrift SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/thrift/lib/cpp/src" ${CMAKE_CURRENT_BINARY_DIR}) +target_link_libraries (_thrift PUBLIC boost::headers_only) diff --git a/contrib/unixodbc-cmake/CMakeLists.txt b/contrib/unixodbc-cmake/CMakeLists.txt index e03f6313a31..b594ead3ba0 100644 --- a/contrib/unixodbc-cmake/CMakeLists.txt +++ b/contrib/unixodbc-cmake/CMakeLists.txt @@ -1,4 +1,13 @@ -if (NOT USE_INTERNAL_ODBC_LIBRARY) +option (ENABLE_ODBC "Enable ODBC library" ${ENABLE_LIBRARIES}) +if (NOT OS_LINUX) + if (ENABLE_ODBC) + message(STATUS "ODBC is only supported on Linux") + endif() + set (ENABLE_ODBC OFF CACHE INTERNAL "") +endif () + +if (NOT ENABLE_ODBC) + message(STATUS "Not using ODBC") return() endif() @@ -20,9 +29,9 @@ set (SRCS_LTDL "${LIBRARY_DIR}/libltdl/loaders/preopen.c" ) -add_library (ltdl ${SRCS_LTDL}) +add_library (_ltdl ${SRCS_LTDL}) -target_include_directories(ltdl +target_include_directories(_ltdl SYSTEM PRIVATE linux_x86_64/libltdl @@ -30,8 +39,8 @@ target_include_directories(ltdl "${LIBRARY_DIR}/libltdl" "${LIBRARY_DIR}/libltdl/libltdl" ) -target_compile_definitions(ltdl PRIVATE -DHAVE_CONFIG_H -DLTDL -DLTDLOPEN=libltdlc) -target_compile_options(ltdl PRIVATE -Wno-constant-logical-operand -Wno-unknown-warning-option -O2) +target_compile_definitions(_ltdl PRIVATE -DHAVE_CONFIG_H -DLTDL -DLTDLOPEN=libltdlc) +target_compile_options(_ltdl PRIVATE -Wno-constant-logical-operand -Wno-unknown-warning-option -O2) # odbc @@ -270,13 +279,13 @@ set (SRCS "${LIBRARY_DIR}/odbcinst/SQLWritePrivateProfileString.c" ) -add_library (unixodbc ${SRCS}) +add_library (_unixodbc ${SRCS}) -target_link_libraries (unixodbc PRIVATE ltdl) +target_link_libraries (_unixodbc PRIVATE _ltdl) # SYSTEM_FILE_PATH was changed to /etc -target_include_directories (unixodbc +target_include_directories (_unixodbc SYSTEM PRIVATE linux_x86_64/private @@ -284,8 +293,8 @@ target_include_directories (unixodbc linux_x86_64 "${LIBRARY_DIR}/include" ) -target_compile_definitions (unixodbc PRIVATE -DHAVE_CONFIG_H) -target_compile_options (unixodbc +target_compile_definitions (_unixodbc PRIVATE -DHAVE_CONFIG_H) +target_compile_options 
(_unixodbc PRIVATE -Wno-dangling-else -Wno-parentheses @@ -294,4 +303,5 @@ target_compile_options (unixodbc -Wno-reserved-id-macro -O2 ) -target_compile_definitions (unixodbc INTERFACE USE_ODBC=1) + +add_library (ch_contrib::unixodbc ALIAS _unixodbc) diff --git a/contrib/wordnet-blast-cmake/CMakeLists.txt b/contrib/wordnet-blast-cmake/CMakeLists.txt index 37e4e9825ca..40712ecd2c5 100644 --- a/contrib/wordnet-blast-cmake/CMakeLists.txt +++ b/contrib/wordnet-blast-cmake/CMakeLists.txt @@ -6,8 +6,7 @@ set(SRCS "${LIBRARY_DIR}/wnb/core/wordnet.cc" ) -add_library(wnb ${SRCS}) - -target_link_libraries(wnb PRIVATE boost::headers_only boost::graph) - -target_include_directories(wnb SYSTEM PUBLIC "${LIBRARY_DIR}") +add_library(_wnb ${SRCS}) +target_link_libraries(_wnb PRIVATE boost::headers_only boost::graph) +target_include_directories(_wnb SYSTEM PUBLIC "${LIBRARY_DIR}") +add_library(ch_contrib::wnb ALIAS _wnb) diff --git a/contrib/xz-cmake/CMakeLists.txt b/contrib/xz-cmake/CMakeLists.txt index 5d70199413f..9d08adc9c7a 100644 --- a/contrib/xz-cmake/CMakeLists.txt +++ b/contrib/xz-cmake/CMakeLists.txt @@ -97,7 +97,7 @@ endif () find_package(Threads REQUIRED) -add_library(liblzma +add_library(_liblzma ${SRC_DIR}/src/common/mythread.h ${SRC_DIR}/src/common/sysdefs.h ${SRC_DIR}/src/common/tuklib_common.h @@ -241,7 +241,7 @@ add_library(liblzma ${SRC_DIR}/src/liblzma/simple/x86.c ) -target_include_directories(liblzma SYSTEM PUBLIC +target_include_directories(_liblzma SYSTEM PRIVATE ${SRC_DIR}/src/liblzma/api ${SRC_DIR}/src/liblzma/common ${SRC_DIR}/src/liblzma/check @@ -252,12 +252,15 @@ target_include_directories(liblzma SYSTEM PUBLIC ${SRC_DIR}/src/liblzma/simple ${SRC_DIR}/src/common ) +target_include_directories(_liblzma SYSTEM BEFORE PUBLIC ${SRC_DIR}/src/liblzma/api) -target_link_libraries(liblzma Threads::Threads) +target_link_libraries(_liblzma Threads::Threads) # Put the tuklib functions under the lzma_ namespace. 
-target_compile_definitions(liblzma PRIVATE TUKLIB_SYMBOL_PREFIX=lzma_) +target_compile_definitions(_liblzma PRIVATE TUKLIB_SYMBOL_PREFIX=lzma_) if (ENABLE_SSE2) - target_compile_definitions(liblzma PRIVATE HAVE_IMMINTRIN_H HAVE__MM_MOVEMASK_EPI8) + target_compile_definitions(_liblzma PRIVATE HAVE_IMMINTRIN_H HAVE__MM_MOVEMASK_EPI8) endif() + +add_library(ch_contrib::xz ALIAS _liblzma) diff --git a/contrib/yaml-cpp-cmake/CMakeLists.txt b/contrib/yaml-cpp-cmake/CMakeLists.txt index ed0287de110..00e85f90932 100644 --- a/contrib/yaml-cpp-cmake/CMakeLists.txt +++ b/contrib/yaml-cpp-cmake/CMakeLists.txt @@ -1,39 +1,47 @@ +option(ENABLE_YAML_CPP "Enable yaml-cpp" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_YAML_CPP) + message(STATUS "Not using yaml") + return() +endif() + set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/yaml-cpp) set (SRCS - ${LIBRARY_DIR}/src/binary.cpp - ${LIBRARY_DIR}/src/emitterutils.cpp - ${LIBRARY_DIR}/src/null.cpp - ${LIBRARY_DIR}/src/scantoken.cpp - ${LIBRARY_DIR}/src/convert.cpp - ${LIBRARY_DIR}/src/exceptions.cpp - ${LIBRARY_DIR}/src/ostream_wrapper.cpp - ${LIBRARY_DIR}/src/simplekey.cpp - ${LIBRARY_DIR}/src/depthguard.cpp - ${LIBRARY_DIR}/src/exp.cpp - ${LIBRARY_DIR}/src/parse.cpp - ${LIBRARY_DIR}/src/singledocparser.cpp - ${LIBRARY_DIR}/src/directives.cpp - ${LIBRARY_DIR}/src/memory.cpp - ${LIBRARY_DIR}/src/parser.cpp - ${LIBRARY_DIR}/src/stream.cpp - ${LIBRARY_DIR}/src/emit.cpp - ${LIBRARY_DIR}/src/nodebuilder.cpp - ${LIBRARY_DIR}/src/regex_yaml.cpp - ${LIBRARY_DIR}/src/tag.cpp - ${LIBRARY_DIR}/src/emitfromevents.cpp - ${LIBRARY_DIR}/src/node.cpp - ${LIBRARY_DIR}/src/scanner.cpp - ${LIBRARY_DIR}/src/emitter.cpp - ${LIBRARY_DIR}/src/node_data.cpp - ${LIBRARY_DIR}/src/scanscalar.cpp - ${LIBRARY_DIR}/src/emitterstate.cpp - ${LIBRARY_DIR}/src/nodeevents.cpp - ${LIBRARY_DIR}/src/scantag.cpp + ${LIBRARY_DIR}/src/binary.cpp + ${LIBRARY_DIR}/src/emitterutils.cpp + ${LIBRARY_DIR}/src/null.cpp + ${LIBRARY_DIR}/src/scantoken.cpp + ${LIBRARY_DIR}/src/convert.cpp + ${LIBRARY_DIR}/src/exceptions.cpp + ${LIBRARY_DIR}/src/ostream_wrapper.cpp + ${LIBRARY_DIR}/src/simplekey.cpp + ${LIBRARY_DIR}/src/depthguard.cpp + ${LIBRARY_DIR}/src/exp.cpp + ${LIBRARY_DIR}/src/parse.cpp + ${LIBRARY_DIR}/src/singledocparser.cpp + ${LIBRARY_DIR}/src/directives.cpp + ${LIBRARY_DIR}/src/memory.cpp + ${LIBRARY_DIR}/src/parser.cpp + ${LIBRARY_DIR}/src/stream.cpp + ${LIBRARY_DIR}/src/emit.cpp + ${LIBRARY_DIR}/src/nodebuilder.cpp + ${LIBRARY_DIR}/src/regex_yaml.cpp + ${LIBRARY_DIR}/src/tag.cpp + ${LIBRARY_DIR}/src/emitfromevents.cpp + ${LIBRARY_DIR}/src/node.cpp + ${LIBRARY_DIR}/src/scanner.cpp + ${LIBRARY_DIR}/src/emitter.cpp + ${LIBRARY_DIR}/src/node_data.cpp + ${LIBRARY_DIR}/src/scanscalar.cpp + ${LIBRARY_DIR}/src/emitterstate.cpp + ${LIBRARY_DIR}/src/nodeevents.cpp + ${LIBRARY_DIR}/src/scantag.cpp ) -add_library (yaml-cpp ${SRCS}) +add_library (_yaml_cpp ${SRCS}) +target_include_directories(_yaml_cpp PRIVATE ${LIBRARY_DIR}/include/yaml-cpp) +target_include_directories(_yaml_cpp SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}/include) -target_include_directories(yaml-cpp PRIVATE ${LIBRARY_DIR}/include/yaml-cpp) -target_include_directories(yaml-cpp SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}/include) +add_library (ch_contrib::yaml_cpp ALIAS _yaml_cpp) diff --git a/contrib/zlib-ng-cmake/CMakeLists.txt b/contrib/zlib-ng-cmake/CMakeLists.txt index bf5bc0d7f1c..371a07dd31a 100644 --- a/contrib/zlib-ng-cmake/CMakeLists.txt +++ b/contrib/zlib-ng-cmake/CMakeLists.txt @@ -130,8 +130,8 @@ set(ZLIB_SRCS set(ZLIB_ALL_SRCS ${ZLIB_SRCS} 
${ZLIB_ARCH_SRCS}) -add_library(zlib ${ZLIB_ALL_SRCS}) -add_library(zlibstatic ALIAS zlib) +add_library(_zlib ${ZLIB_ALL_SRCS}) +add_library(ch_contrib::zlib ALIAS _zlib) # https://github.com/zlib-ng/zlib-ng/pull/733 # This is disabed by default @@ -153,9 +153,9 @@ configure_file(${SOURCE_DIR}/zlib.pc.cmakein ${ZLIB_PC} @ONLY) configure_file(${CMAKE_CURRENT_BINARY_DIR}/zconf.h.cmakein ${CMAKE_CURRENT_BINARY_DIR}/zconf.h @ONLY) # We should use same defines when including zlib.h as used when zlib compiled -target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP) +target_compile_definitions (_zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP) if (ARCH_AMD64 OR ARCH_AARCH64) - target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK) + target_compile_definitions (_zlib PUBLIC X86_64 UNALIGNED_OK) endif () -target_include_directories(zlib SYSTEM PUBLIC ${SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) +target_include_directories(_zlib SYSTEM BEFORE PUBLIC ${SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}) diff --git a/contrib/zstd-cmake/CMakeLists.txt b/contrib/zstd-cmake/CMakeLists.txt index 226ee1a8067..4949c3f30d5 100644 --- a/contrib/zstd-cmake/CMakeLists.txt +++ b/contrib/zstd-cmake/CMakeLists.txt @@ -148,7 +148,7 @@ IF (ZSTD_LEGACY_SUPPORT) "${LIBRARY_LEGACY_DIR}/zstd_v07.h") ENDIF (ZSTD_LEGACY_SUPPORT) -ADD_LIBRARY(zstd ${Sources} ${Headers}) - -target_include_directories (zstd PUBLIC ${LIBRARY_DIR}) -target_compile_options(zstd PRIVATE -fno-sanitize=undefined) +add_library(_zstd ${Sources} ${Headers}) +add_library(ch_contrib::zstd ALIAS _zstd) +target_include_directories(_zstd BEFORE PUBLIC ${LIBRARY_DIR}) +target_compile_options(_zstd PRIVATE -fno-sanitize=undefined) diff --git a/docker/packager/other/fuzzer.sh b/docker/packager/other/fuzzer.sh index 431352f1126..ac820d9e689 100755 --- a/docker/packager/other/fuzzer.sh +++ b/docker/packager/other/fuzzer.sh @@ -14,7 +14,7 @@ read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}" # Hope, that the most part of files will be in cache, so we just link new executables # Please, add or change flags directly in cmake cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_C_COMPILER="$CC" -DCMAKE_CXX_COMPILER="$CXX" \ - -DSANITIZE="$SANITIZER" -DENABLE_FUZZING=1 -DFUZZER='libfuzzer' -DENABLE_PROTOBUF=1 -DUSE_INTERNAL_PROTOBUF_LIBRARY=1 "${CMAKE_FLAGS[@]}" .. + -DSANITIZE="$SANITIZER" -DENABLE_FUZZING=1 -DFUZZER='libfuzzer' -DENABLE_PROTOBUF=1 "${CMAKE_FLAGS[@]}" .. FUZZER_TARGETS=$(find ../src -name '*_fuzzer.cpp' -execdir basename {} .cpp ';' | tr '\n' ' ') diff --git a/docker/packager/packager b/docker/packager/packager index 4e3e26d215f..05b2e02df96 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -156,7 +156,6 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ result.append('ENABLE_TESTS=1') result.append('BINARY_OUTPUT=tests') cmake_flags.append('-DENABLE_TESTS=1') - cmake_flags.append('-DUSE_GTEST=1') if split_binary: cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1') @@ -168,7 +167,6 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ if clang_tidy: cmake_flags.append('-DENABLE_CLANG_TIDY=1') cmake_flags.append('-DENABLE_UTILS=1') - cmake_flags.append('-DUSE_GTEST=1') cmake_flags.append('-DENABLE_TESTS=1') cmake_flags.append('-DENABLE_EXAMPLES=1') # Don't stop on first error to find more clang-tidy errors in one run. 
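All of the contrib CMake hunks above follow the same convention: each bundled library is built as an internal `_name` target, exposed through a namespaced `ch_contrib::name` alias, and guarded by an `ENABLE_*` option that makes the sub-directory return early when the library is disabled. A minimal sketch of how a consumer is then expected to link such a library follows; the `clickhouse_common_io` target name and the `USE_SIMDJSON` definition are illustrative assumptions, not taken from this patch.

```cmake
# Sketch only: consuming a bundled library through its ch_contrib:: alias.
# When ENABLE_SIMDJSON=OFF the contrib directory returns early and the alias
# target is never created, so the consumer checks for the target itself
# (the same pattern the rocksdb hunk uses with ch_contrib::snappy).
if (TARGET ch_contrib::simdjson)
    target_link_libraries (clickhouse_common_io PRIVATE ch_contrib::simdjson)
    target_compile_definitions (clickhouse_common_io PRIVATE USE_SIMDJSON=1)
endif ()
```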
diff --git a/docs/en/development/build-cross-riscv.md b/docs/en/development/build-cross-riscv.md index 977387af207..5cdce710b41 100644 --- a/docs/en/development/build-cross-riscv.md +++ b/docs/en/development/build-cross-riscv.md @@ -23,7 +23,7 @@ sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ``` bash cd ClickHouse mkdir build-riscv64 -CC=clang-13 CXX=clang++-13 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DUSE_INTERNAL_PARQUET_LIBRARY=OFF -DENABLE_ORC=OFF -DUSE_INTERNAL_ORC_LIBRARY=OFF -DUSE_UNWIND=OFF -DUSE_INTERNAL_PROTOBUF_LIBRARY=ON -DENABLE_GRPC=OFF -DUSE_INTERNAL_GRPC_LIBRARY=OFF -DENABLE_HDFS=OFF -DUSE_INTERNAL_HDFS3_LIBRARY=OFF -DENABLE_MYSQL=OFF -DUSE_INTERNAL_MYSQL_LIBRARY=OFF +CC=clang-13 CXX=clang++-13 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_ORC=OFF -DUSE_UNWIND=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF ninja -C build-riscv64 ``` diff --git a/docs/en/introduction/adopters.md b/docs/en/introduction/adopters.md index b67e373be35..5efa1b971bc 100644 --- a/docs/en/introduction/adopters.md +++ b/docs/en/introduction/adopters.md @@ -105,10 +105,13 @@ toc_title: Adopters | MindsDB | Machine Learning | Main Product | — | — | [Official Website](https://www.mindsdb.com/blog/machine-learning-models-as-tables-in-ch) | | MUX | Online Video | Video Analytics | — | — | [Talk in English, August 2019](https://altinity.com/presentations/2019/8/13/how-clickhouse-became-the-default-analytics-database-for-mux/) | | MGID | Ad network | Web-analytics | — | — | [Blog post in Russian, April 2020](http://gs-studio.com/news-about-it/32777----clickhouse---c) | +| Muse Group | Music Software | Performance Monitoring | — | — | [Blog post in Russian, January 2021](https://habr.com/en/post/647079/) | | Netskope | Network Security | — | — | — | [Job advertisement, March 2021](https://www.mendeley.com/careers/job/senior-software-developer-backend-developer-1346348) | | NIC Labs | Network Monitoring | RaTA-DNS | — | — | [Blog post, March 2021](https://niclabs.cl/ratadns/2021/03/Clickhouse) | +| NLMK | Steel | Monitoring | — | — | [Article in Russian, Jan 2022](https://habr.com/en/company/nlmk/blog/645943/) | | NOC Project | Network Monitoring | Analytics | Main Product | — | [Official Website](https://getnoc.com/features/big-data/) | | Noction | Network Technology | Main Product | — | — | [Official Website](https://www.noction.com/news/irp-3-11-remote-triggered-blackholing-capability) +| ntop | Network Monitoning | Monitoring | — | — | [Official website, Jan 2022](https://www.ntop.org/ntop/historical-traffic-analysis-at-scale-using-clickhouse-with-ntopng/) | | Nuna Inc. 
| Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) | | Ok.ru | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, October 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) | | Omnicomm | Transportation Monitoring | — | — | — | [Facebook post, October 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) | @@ -190,5 +193,6 @@ toc_title: Adopters | Цифровой Рабочий | Industrial IoT, Analytics | — | — | — | [Blog post in Russian, March 2021](https://habr.com/en/company/croc/blog/548018/) | | ООО «МПЗ Богородский» | Agriculture | — | — | — | [Article in Russian, November 2020](https://cloud.yandex.ru/cases/okraina) | | ДомКлик | Real Estate | — | — | — | [Article in Russian, October 2021](https://habr.com/ru/company/domclick/blog/585936/) | +| АС "Стрела" | Transportation | — | — | — | [Job posting, Jan 2022](https://vk.com/topic-111905078_35689124?post=3553) | [Original article](https://clickhouse.com/docs/en/introduction/adopters/) diff --git a/docs/en/operations/external-authenticators/kerberos.md b/docs/en/operations/external-authenticators/kerberos.md index f326762a610..da84c1f6a89 100644 --- a/docs/en/operations/external-authenticators/kerberos.md +++ b/docs/en/operations/external-authenticators/kerberos.md @@ -51,6 +51,9 @@ With filtering by realm: ``` +!!! warning "Note" + You can define only one `kerberos` section. The presence of multiple `kerberos` sections will force ClickHouse to disable Kerberos authentication. + !!! warning "Note" `principal` and `realm` sections cannot be specified at the same time. The presence of both `principal` and `realm` sections will force ClickHouse to disable Kerberos authentication. diff --git a/docs/en/operations/tips.md b/docs/en/operations/tips.md index 477d3b52965..64e65575f3f 100644 --- a/docs/en/operations/tips.md +++ b/docs/en/operations/tips.md @@ -129,6 +129,10 @@ If you want to divide an existing ZooKeeper cluster into two, the correct way is Do not run ZooKeeper on the same servers as ClickHouse. Because ZooKeeper is very sensitive for latency and ClickHouse may utilize all available system resources. +You can have ZooKeeper observers in an ensemble but ClickHouse servers should not interact with observers. + +Do not change `minSessionTimeout` setting, large values may affect ClickHouse restart stability. + With the default settings, ZooKeeper is a time bomb: > The ZooKeeper server won’t delete files from old snapshots and logs when using the default configuration (see autopurge), and this is the responsibility of the operator. diff --git a/docs/en/sql-reference/aggregate-functions/reference/meanztest.md b/docs/en/sql-reference/aggregate-functions/reference/meanztest.md new file mode 100644 index 00000000000..7d016f42819 --- /dev/null +++ b/docs/en/sql-reference/aggregate-functions/reference/meanztest.md @@ -0,0 +1,70 @@ +--- +toc_priority: 303 +toc_title: meanZTest +--- + +# meanZTest {#meanztest} + +Applies mean z-test to samples from two populations. + +**Syntax** + +``` sql +meanZTest(population_variance_x, population_variance_y, confidence_level)(sample_data, sample_index) +``` + +Values of both samples are in the `sample_data` column. If `sample_index` equals to 0 then the value in that row belongs to the sample from the first population. Otherwise it belongs to the sample from the second population. 
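For reference, the statistic such a test reports is the textbook two-sample z-statistic computed from the sample means, the supplied population variances, and the sample sizes; this is a sketch of the standard formula, not text taken from the implementation:

$$
z = \frac{\bar{x} - \bar{y}}{\sqrt{\dfrac{\sigma_x^2}{n_x} + \dfrac{\sigma_y^2}{n_y}}}
$$

where $\sigma_x^2$ and $\sigma_y^2$ correspond to `population_variance_x` and `population_variance_y`, and $n_x$, $n_y$ are the sizes of the two samples.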
+The null hypothesis is that means of populations are equal. Normal distribution is assumed. Populations may have unequal variance and the variances are known. + +**Arguments** + +- `sample_data` — Sample data. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md). +- `sample_index` — Sample index. [Integer](../../../sql-reference/data-types/int-uint.md). + +**Parameters** + +- `population_variance_x` — Variance for population x. [Float](../../../sql-reference/data-types/float.md). +- `population_variance_y` — Variance for population y. [Float](../../../sql-reference/data-types/float.md). +- `confidence_level` — Confidence level in order to calculate confidence intervals. [Float](../../../sql-reference/data-types/float.md). + +**Returned values** + +[Tuple](../../../sql-reference/data-types/tuple.md) with four elements: + +- calculated t-statistic. [Float64](../../../sql-reference/data-types/float.md). +- calculated p-value. [Float64](../../../sql-reference/data-types/float.md). +- calculated confidence-interval-low. [Float64](../../../sql-reference/data-types/float.md). +- calculated confidence-interval-high. [Float64](../../../sql-reference/data-types/float.md). + + +**Example** + +Input table: + +``` text +┌─sample_data─┬─sample_index─┐ +│ 20.3 │ 0 │ +│ 21.9 │ 0 │ +│ 22.1 │ 0 │ +│ 18.9 │ 1 │ +│ 19 │ 1 │ +│ 20.3 │ 1 │ +└─────────────┴──────────────┘ +``` + +Query: + +``` sql +SELECT meanZTest(0.7, 0.45, 0.95)(sample_data, sample_index) FROM mean_ztest +``` + +Result: + +``` text +┌─meanZTest(0.7, 0.45, 0.95)(sample_data, sample_index)────────────────────────────┐ +│ (3.2841296025548123,0.0010229786769086013,0.8198428246768334,3.2468238419898365) │ +└──────────────────────────────────────────────────────────────────────────────────┘ +``` + + +[Original article](https://clickhouse.com/docs/en/sql-reference/aggregate-functions/reference/meanZTest/) diff --git a/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md b/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md index bee77a382d7..5d120e53a04 100644 --- a/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md +++ b/docs/en/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md @@ -159,7 +159,8 @@ Configuration fields: | Tag | Description | Required | |------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------| | `name` | Column name. 
| Yes | -| `type` | ClickHouse data type: [UInt8](../../../sql-reference/data-types/int-uint.md), [UInt16](../../../sql-reference/data-types/int-uint.md), [UInt32](../../../sql-reference/data-types/int-uint.md), [UInt64](../../../sql-reference/data-types/int-uint.md), [Int8](../../../sql-reference/data-types/int-uint.md), [Int16](../../../sql-reference/data-types/int-uint.md), [Int32](../../../sql-reference/data-types/int-uint.md), [Int64](../../../sql-reference/data-types/int-uint.md), [Float32](../../../sql-reference/data-types/float.md), [Float64](../../../sql-reference/data-types/float.md), [UUID](../../../sql-reference/data-types/uuid.md), [Decimal32](../../../sql-reference/data-types/decimal.md), [Decimal64](../../../sql-reference/data-types/decimal.md), [Decimal128](../../../sql-reference/data-types/decimal.md), [Decimal256](../../../sql-reference/data-types/decimal.md), [String](../../../sql-reference/data-types/string.md), [Array](../../../sql-reference/data-types/array.md).
ClickHouse tries to cast value from dictionary to the specified data type. For example, for MySQL, the field might be `TEXT`, `VARCHAR`, or `BLOB` in the MySQL source table, but it can be uploaded as `String` in ClickHouse.
[Nullable](../../../sql-reference/data-types/nullable.md) is currently supported for [Flat](external-dicts-dict-layout.md#flat), [Hashed](external-dicts-dict-layout.md#dicts-external_dicts_dict_layout-hashed), [ComplexKeyHashed](external-dicts-dict-layout.md#complex-key-hashed), [Direct](external-dicts-dict-layout.md#direct), [ComplexKeyDirect](external-dicts-dict-layout.md#complex-key-direct), [RangeHashed](external-dicts-dict-layout.md#range-hashed), [Polygon](external-dicts-dict-polygon.md), [Cache](external-dicts-dict-layout.md#cache), [ComplexKeyCache](external-dicts-dict-layout.md#complex-key-cache), [SSDCache](external-dicts-dict-layout.md#ssd-cache), [SSDComplexKeyCache](external-dicts-dict-layout.md#complex-key-ssd-cache) dictionaries. In [IPTrie](external-dicts-dict-layout.md#ip-trie) dictionaries `Nullable` types are not supported. | Yes | +| `type` | ClickHouse data type: [UInt8](../../../sql-reference/data-types/int-uint.md), [UInt16](../../../sql-reference/data-types/int-uint.md), [UInt32](../../../sql-reference/data-types/int-uint.md), [UInt64](../../../sql-reference/data-types/int-uint.md), [Int8](../../../sql-reference/data-types/int-uint.md), [Int16](../../../sql-reference/data-types/int-uint.md), [Int32](../../../sql-reference/data-types/int-uint.md), [Int64](../../../sql-reference/data-types/int-uint.md), [Float32](../../../sql-reference/data-types/float.md), [Float64](../../../sql-reference/data-types/float.md), [UUID](../../../sql-reference/data-types/uuid.md), [Decimal32](../../../sql-reference/data-types/decimal.md), [Decimal64](../../../sql-reference/data-types/decimal.md), [Decimal128](../../../sql-reference/data-types/decimal.md), [Decimal256](../../../sql-reference/data-types/decimal.md), +[Date](../../../sql-reference/data-types/date.md), [Date32](../../../sql-reference/data-types/date32.md), [DateTime](../../../sql-reference/data-types/datetime.md), [DateTime64](../../../sql-reference/data-types/datetime64.md), [String](../../../sql-reference/data-types/string.md), [Array](../../../sql-reference/data-types/array.md).
ClickHouse tries to cast the value from the dictionary to the specified data type. For example, for MySQL, the field might be `TEXT`, `VARCHAR`, or `BLOB` in the MySQL source table, but it can be uploaded as `String` in ClickHouse.
[Nullable](../../../sql-reference/data-types/nullable.md) is currently supported for [Flat](external-dicts-dict-layout.md#flat), [Hashed](external-dicts-dict-layout.md#dicts-external_dicts_dict_layout-hashed), [ComplexKeyHashed](external-dicts-dict-layout.md#complex-key-hashed), [Direct](external-dicts-dict-layout.md#direct), [ComplexKeyDirect](external-dicts-dict-layout.md#complex-key-direct), [RangeHashed](external-dicts-dict-layout.md#range-hashed), [Polygon](external-dicts-dict-polygon.md), [Cache](external-dicts-dict-layout.md#cache), [ComplexKeyCache](external-dicts-dict-layout.md#complex-key-cache), [SSDCache](external-dicts-dict-layout.md#ssd-cache), [SSDComplexKeyCache](external-dicts-dict-layout.md#complex-key-ssd-cache) dictionaries. In [IPTrie](external-dicts-dict-layout.md#ip-trie) dictionaries `Nullable` types are not supported. | Yes | | `null_value` | Default value for a non-existing element.
In the example, it is an empty string. A [NULL](../../syntax.md#null-literal) value can be used only for the `Nullable` types (see the previous line with the description of types). | Yes | | `expression` | [Expression](../../../sql-reference/syntax.md#syntax-expressions) that ClickHouse executes on the value.
The expression can be a column name in the remote SQL database. Thus, you can use it to create an alias for the remote column.

Default value: no expression. | No | | `hierarchical` | If `true`, the attribute contains the value of a parent key for the current key. See [Hierarchical Dictionaries](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-hierarchical.md).

Default value: `false`. | No | diff --git a/docs/en/sql-reference/functions/bit-functions.md b/docs/en/sql-reference/functions/bit-functions.md index 899b6385a3e..24adb362c98 100644 --- a/docs/en/sql-reference/functions/bit-functions.md +++ b/docs/en/sql-reference/functions/bit-functions.md @@ -117,6 +117,59 @@ Result: ## bitRotateRight(a, b) {#bitrotaterighta-b} +## bitSlice(s, offset, length) + +Returns a substring starting with the bit from the ‘offset’ index that is ‘length’ bits long. bits indexing starts from +1 + +**Syntax** + +``` sql +bitSlice(s, offset[, length]) +``` + +**Arguments** + +- `s` — s is [String](../../sql-reference/data-types/string.md) + or [FixedString](../../sql-reference/data-types/fixedstring.md). +- `offset` — The start index with bit, A positive value indicates an offset on the left, and a negative value is an + indent on the right. Numbering of the bits begins with 1. +- `length` — The length of substring with bit. If you specify a negative value, the function returns an open substring [ + offset, array_length - length). If you omit the value, the function returns the substring [offset, the_end_string]. + If length exceeds s, it will be truncate.If length isn't multiple of 8, will fill 0 on the right. + +**Returned value** + +- The substring. [String](../../sql-reference/data-types/string.md) + +**Example** + +Query: + +``` sql +select bin('Hello'), bin(bitSlice('Hello', 1, 8)) +select bin('Hello'), bin(bitSlice('Hello', 1, 2)) +select bin('Hello'), bin(bitSlice('Hello', 1, 9)) +select bin('Hello'), bin(bitSlice('Hello', -4, 8)) +``` + +Result: + +``` text +┌─bin('Hello')─────────────────────────────┬─bin(bitSlice('Hello', 1, 8))─┐ +│ 0100100001100101011011000110110001101111 │ 01001000 │ +└──────────────────────────────────────────┴──────────────────────────────┘ +┌─bin('Hello')─────────────────────────────┬─bin(bitSlice('Hello', 1, 2))─┐ +│ 0100100001100101011011000110110001101111 │ 01000000 │ +└──────────────────────────────────────────┴──────────────────────────────┘ +┌─bin('Hello')─────────────────────────────┬─bin(bitSlice('Hello', 1, 9))─┐ +│ 0100100001100101011011000110110001101111 │ 0100100000000000 │ +└──────────────────────────────────────────┴──────────────────────────────┘ +┌─bin('Hello')─────────────────────────────┬─bin(bitSlice('Hello', -4, 8))─┐ +│ 0100100001100101011011000110110001101111 │ 11110000 │ +└──────────────────────────────────────────┴───────────────────────────────┘ +``` + ## bitTest {#bittest} Takes any integer and converts it into [binary form](https://en.wikipedia.org/wiki/Binary_number), returns the value of a bit at specified position. The countdown starts from 0 from the right to the left. diff --git a/docs/en/sql-reference/functions/encoding-functions.md b/docs/en/sql-reference/functions/encoding-functions.md index 69dd14da1bf..ec1524f1fa3 100644 --- a/docs/en/sql-reference/functions/encoding-functions.md +++ b/docs/en/sql-reference/functions/encoding-functions.md @@ -93,6 +93,8 @@ For [String](../../sql-reference/data-types/string.md) and [FixedString](../../s Values of [Float](../../sql-reference/data-types/float.md) and [Decimal](../../sql-reference/data-types/decimal.md) types are encoded as their representation in memory. As we support little-endian architecture, they are encoded in little-endian. Zero leading/trailing bytes are not omitted. +Values of [UUID](../data-types/uuid.md) type are encoded as big-endian order string. + **Arguments** - `arg` — A value to convert to hexadecimal. 
Types: [String](../../sql-reference/data-types/string.md), [UInt](../../sql-reference/data-types/int-uint.md), [Float](../../sql-reference/data-types/float.md), [Decimal](../../sql-reference/data-types/decimal.md), [Date](../../sql-reference/data-types/date.md) or [DateTime](../../sql-reference/data-types/datetime.md). @@ -147,6 +149,21 @@ Result: └──────────────────┘ ``` +Query: + +``` sql +SELECT lower(hex(toUUID('61f0c404-5cb3-11e7-907b-a6006ad3dba0'))) as uuid_hex +``` + +Result: + +``` text +┌─uuid_hex─────────────────────────┐ +│ 61f0c4045cb311e7907ba6006ad3dba0 │ +└──────────────────────────────────┘ +``` + + ## unhex {#unhexstr} Performs the opposite operation of [hex](#hex). It interprets each pair of hexadecimal digits (in the argument) as a number and converts it to the byte represented by the number. The return value is a binary string (BLOB). @@ -224,6 +241,8 @@ For [String](../../sql-reference/data-types/string.md) and [FixedString](../../s Values of [Float](../../sql-reference/data-types/float.md) and [Decimal](../../sql-reference/data-types/decimal.md) types are encoded as their representation in memory. As we support little-endian architecture, they are encoded in little-endian. Zero leading/trailing bytes are not omitted. +Values of [UUID](../data-types/uuid.md) type are encoded as big-endian order string. + **Arguments** - `arg` — A value to convert to binary. [String](../../sql-reference/data-types/string.md), [FixedString](../../sql-reference/data-types/fixedstring.md), [UInt](../../sql-reference/data-types/int-uint.md), [Float](../../sql-reference/data-types/float.md), [Decimal](../../sql-reference/data-types/decimal.md), [Date](../../sql-reference/data-types/date.md), or [DateTime](../../sql-reference/data-types/datetime.md). @@ -280,6 +299,21 @@ Result: └──────────────────────────────────────────────────────────────────┘ ``` +Query: + +``` sql +SELECT bin(toUUID('61f0c404-5cb3-11e7-907b-a6006ad3dba0')) as bin_uuid +``` + +Result: + +``` text +┌─bin_uuid─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐ +│ 01100001111100001100010000000100010111001011001100010001111001111001000001111011101001100000000001101010110100111101101110100000 │ +└──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ +``` + + ## unbin {#unbinstr} Interprets each pair of binary digits (in the argument) as a number and converts it to the byte represented by the number. The functions performs the opposite operation to [bin](#bin). diff --git a/docs/en/sql-reference/functions/geo/h3.md b/docs/en/sql-reference/functions/geo/h3.md index fcd78c4452f..ecbe00adfd7 100644 --- a/docs/en/sql-reference/functions/geo/h3.md +++ b/docs/en/sql-reference/functions/geo/h3.md @@ -156,6 +156,40 @@ Result: └─────────────┘ ``` +## h3EdgeLengthKm {#h3edgelengthkm} + +Calculates the average length of the [H3](#h3index) hexagon edge in kilometers. + +**Syntax** + +``` sql +h3EdgeLengthKm(resolution) +``` + +**Parameter** + +- `resolution` — Index resolution. Type: [UInt8](../../../sql-reference/data-types/int-uint.md). Range: `[0, 15]`. + +**Returned values** + +- The average length of the [H3](#h3index) hexagon edge in kilometers. Type: [Float64](../../../sql-reference/data-types/float.md). 
+ +**Example** + +Query: + +``` sql +SELECT h3EdgeLengthKm(15) AS edgeLengthKm; +``` + +Result: + +``` text +┌─edgeLengthKm─┐ +│ 0.000509713 │ +└──────────────┘ +``` + ## geoToH3 {#geotoh3} Returns [H3](#h3index) point index `(lon, lat)` with specified resolution. @@ -197,7 +231,7 @@ Result: ## h3ToGeo {#h3togeo} -Returns the geographical coordinates of longitude and latitude corresponding to the provided [H3](#h3index) index. +Returns the centroid longitude and latitude corresponding to the provided [H3](#h3index) index. **Syntax** @@ -278,7 +312,7 @@ h3kRing(h3index, k) **Arguments** - `h3index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md). -- `k` — Raduis. Type: [integer](../../../sql-reference/data-types/int-uint.md) +- `k` — Radius. Type: [integer](../../../sql-reference/data-types/int-uint.md) **Returned values** @@ -812,4 +846,184 @@ Result: └─────────────────────┘ ``` +## h3ToCenterChild {#h3tocenterchild} + +Returns the center child (finer) [H3](#h3index) index contained by given [H3](#h3index) at the given resolution. + +**Syntax** + +``` sql +h3ToCenterChild(index, resolution) +``` + +**Parameter** + +- `index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md). +- `resolution` — Index resolution. Range: `[0, 15]`. Type: [UInt8](../../../sql-reference/data-types/int-uint.md). + +**Returned values** + +- [H3](#h3index) index of the center child contained by given [H3](#h3index) at the given resolution. + +Type: [UInt64](../../../sql-reference/data-types/int-uint.md). + +**Example** + +Query: + +``` sql +SELECT h3ToCenterChild(577023702256844799,1) AS centerToChild; +``` + +Result: + +``` text +┌──────centerToChild─┐ +│ 581496515558637567 │ +└────────────────────┘ +``` + +## h3ExactEdgeLengthM {#h3exactedgelengthm} + +Returns the exact edge length of the unidirectional edge represented by the input h3 index in meters. + +**Syntax** + +``` sql +h3ExactEdgeLengthM(index) +``` + +**Parameter** + +- `index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md). + +**Returned value** + +- Exact edge length in meters. + +Type: [Float64](../../../sql-reference/data-types/float.md). + +**Example** + +Query: + +``` sql +SELECT h3ExactEdgeLengthM(1310277011704381439) AS exactEdgeLengthM;; +``` + +Result: + +``` text +┌───exactEdgeLengthM─┐ +│ 195449.63163407316 │ +└────────────────────┘ +``` + +## h3ExactEdgeLengthKm {#h3exactedgelengthkm} + +Returns the exact edge length of the unidirectional edge represented by the input h3 index in kilometers. + +**Syntax** + +``` sql +h3ExactEdgeLengthKm(index) +``` + +**Parameter** + +- `index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md). + +**Returned value** + +- Exact edge length in kilometers. + +Type: [Float64](../../../sql-reference/data-types/float.md). + +**Example** + +Query: + +``` sql +SELECT h3ExactEdgeLengthKm(1310277011704381439) AS exactEdgeLengthKm;; +``` + +Result: + +``` text +┌──exactEdgeLengthKm─┐ +│ 195.44963163407317 │ +└────────────────────┘ +``` + +## h3ExactEdgeLengthRads {#h3exactedgelengthrads} + +Returns the exact edge length of the unidirectional edge represented by the input h3 index in radians. + +**Syntax** + +``` sql +h3ExactEdgeLengthRads(index) +``` + +**Parameter** + +- `index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md). + +**Returned value** + +- Exact edge length in radians. 
+ +Type: [Float64](../../../sql-reference/data-types/float.md). + +**Example** + +Query: + +``` sql +SELECT h3ExactEdgeLengthRads(1310277011704381439) AS exactEdgeLengthRads;; +``` + +Result: + +``` text +┌──exactEdgeLengthRads─┐ +│ 0.030677980118976447 │ +└──────────────────────┘ +``` + +## h3NumHexagons {#h3numhexagons} + +Returns the number of unique H3 indices at the given resolution. + +**Syntax** + +``` sql +h3NumHexagons(resolution) +``` + +**Parameter** + +- `resolution` — Index resolution. Range: `[0, 15]`. Type: [UInt8](../../../sql-reference/data-types/int-uint.md). + +**Returned value** + +- Number of H3 indices. + +Type: [Int64](../../../sql-reference/data-types/int-uint.md). + +**Example** + +Query: + +``` sql +SELECT h3NumHexagons(3) AS numHexagons; +``` + +Result: + +``` text +┌─numHexagons─┐ +│ 41162 │ +└─────────────┘ +``` [Original article](https://clickhouse.com/docs/en/sql-reference/functions/geo/h3) diff --git a/docs/en/sql-reference/functions/math-functions.md b/docs/en/sql-reference/functions/math-functions.md index 2b3c000bc19..a5fc07cf687 100644 --- a/docs/en/sql-reference/functions/math-functions.md +++ b/docs/en/sql-reference/functions/math-functions.md @@ -477,3 +477,74 @@ Result: └──────────┘ ``` +## degrees(x) {#degreesx} + +Converts the input value in radians to degrees. + +**Syntax** + +``` sql +degrees(x) +``` + +**Arguments** + +- `x` — Input in radians. [Float64](../../sql-reference/data-types/float.md#float32-float64). + +**Returned value** + +- Value in degrees. + +Type: [Float64](../../sql-reference/data-types/float.md#float32-float64). + +**Example** + +Query: + +``` sql +SELECT degrees(3.141592653589793); +``` + +Result: + +``` text +┌─degrees(3.141592653589793)─┐ +│ 180 │ +└────────────────────────────┘ +``` + +## radians(x) {#radiansx} + +Converts the input value in degrees to radians. + +**Syntax** + +``` sql +radians(x) +``` + +**Arguments** + +- `x` — Input in degrees. [Float64](../../sql-reference/data-types/float.md#float32-float64). + +**Returned value** + +- Value in radians. + +Type: [Float64](../../sql-reference/data-types/float.md#float32-float64). + +**Example** + +Query: + +``` sql +SELECT radians(180); +``` + +Result: + +``` text +┌──────radians(180)─┐ +│ 3.141592653589793 │ +└───────────────────┘ +``` diff --git a/docs/en/sql-reference/functions/tuple-functions.md b/docs/en/sql-reference/functions/tuple-functions.md index 0ddd628d9c2..8502fcdcf66 100644 --- a/docs/en/sql-reference/functions/tuple-functions.md +++ b/docs/en/sql-reference/functions/tuple-functions.md @@ -134,7 +134,23 @@ Tuples should have the same type of the elements. - The Hamming distance. -Type: [UInt8](../../sql-reference/data-types/int-uint.md). +Type: The result type is calculed the same way it is for [Arithmetic functions](../../sql-reference/functions/arithmetic-functions.md), based on the number of elements in the input tuples. 
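+
+For example, the result type widens from `UInt8` up to `UInt64` as the tuples get longer: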
+ +``` sql +SELECT + toTypeName(tupleHammingDistance(tuple(0), tuple(0))) AS t1, + toTypeName(tupleHammingDistance((0, 0), (0, 0))) AS t2, + toTypeName(tupleHammingDistance((0, 0, 0), (0, 0, 0))) AS t3, + toTypeName(tupleHammingDistance((0, 0, 0, 0), (0, 0, 0, 0))) AS t4, + toTypeName(tupleHammingDistance((0, 0, 0, 0, 0), (0, 0, 0, 0, 0))) AS t5 +``` + +``` text +┌─t1────┬─t2─────┬─t3─────┬─t4─────┬─t5─────┐ +│ UInt8 │ UInt16 │ UInt32 │ UInt64 │ UInt64 │ +└───────┴────────┴────────┴────────┴────────┘ +``` + **Examples** diff --git a/docs/en/sql-reference/statements/check-table.md b/docs/en/sql-reference/statements/check-table.md index bc89b11ae4d..c9ad40860f7 100644 --- a/docs/en/sql-reference/statements/check-table.md +++ b/docs/en/sql-reference/statements/check-table.md @@ -46,7 +46,7 @@ CHECK TABLE test_table; └───────────┴───────────┴─────────┘ ``` -If `check_query_single_value_result` = 0, the `CHECK TABLE` query shows the general table check status. +If `check_query_single_value_result` = 1, the `CHECK TABLE` query shows the general table check status. ```sql SET check_query_single_value_result = 1; diff --git a/docs/en/sql-reference/window-functions/index.md b/docs/en/sql-reference/window-functions/index.md index e62808a46bd..0a55eafc7ab 100644 --- a/docs/en/sql-reference/window-functions/index.md +++ b/docs/en/sql-reference/window-functions/index.md @@ -3,7 +3,7 @@ toc_priority: 62 toc_title: Window Functions --- -# [experimental] Window Functions +# Window Functions ClickHouse supports the standard grammar for defining windows and window functions. The following features are currently supported: diff --git a/docs/ru/sql-reference/functions/geo/h3.md b/docs/ru/sql-reference/functions/geo/h3.md index 8f7b98f0a45..78e7bf2fa86 100644 --- a/docs/ru/sql-reference/functions/geo/h3.md +++ b/docs/ru/sql-reference/functions/geo/h3.md @@ -4,11 +4,11 @@ toc_title: "Функции для работы с индексами H3" # Функции для работы с индексами H3 {#h3index} -[H3](https://eng.uber.com/h3/) — это система геокодирования, которая делит поверхность Земли на равные шестигранные ячейки. Система поддерживает иерархию (вложенность) ячеек, т.е. каждый "родительский" шестигранник может быть поделен на семь одинаковых вложенных "дочерних" шестигранников, и так далее. +[H3](https://eng.uber.com/h3/) — это система геокодирования, которая делит поверхность Земли на равные шестиугольные ячейки. Система поддерживает иерархию (вложенность) ячеек, т.е. каждый "родительский" шестиугольник может быть поделен на семь одинаковых вложенных "дочерних" шестиугольников, и так далее. Уровень вложенности называется "разрешением" и может принимать значение от `0` до `15`, где `0` соответствует "базовым" ячейкам самого верхнего уровня (наиболее крупным). -Для каждой точки, имеющей широту и долготу, можно получить 64-битный индекс H3, соответствующий номеру шестигранной ячейки, где эта точка находится. +Для каждой точки, имеющей широту и долготу, можно получить 64-битный индекс H3, соответствующий номеру шестриугольной ячейки, где эта точка находится. Индексы H3 используются, в основном, для геопозиционирования и расчета расстояний. @@ -24,7 +24,7 @@ h3IsValid(h3index) **Параметр** -- `h3index` — идентификатор шестигранника. Тип данных: [UInt64](../../../sql-reference/data-types/int-uint.md). +- `h3index` — идентификатор шестриугольника. Тип данных: [UInt64](../../../sql-reference/data-types/int-uint.md). **Возвращаемые значения** @@ -61,7 +61,7 @@ h3GetResolution(h3index) **Параметр** -- `h3index` — идентификатор шестигранника. 
Тип данных: [UInt64](../../../sql-reference/data-types/int-uint.md). +- `h3index` — идентификатор шестиугольника. Тип данных: [UInt64](../../../sql-reference/data-types/int-uint.md). **Возвращаемые значения** @@ -88,7 +88,7 @@ SELECT h3GetResolution(639821929606596015) AS resolution; ## h3EdgeAngle {#h3edgeangle} -Рассчитывает средний размер стороны шестигранника [H3](#h3index) в градусах. +Рассчитывает средний размер стороны шестиугольника [H3](#h3index) в градусах. **Синтаксис** @@ -102,7 +102,7 @@ h3EdgeAngle(resolution) **Возвращаемое значение** -- Средняя длина стороны шестигранника [H3](#h3index) в градусах. Тип данных: [Float64](../../../sql-reference/data-types/float.md). +- Средняя длина стороны шестиугольника [H3](#h3index) в градусах. Тип данных: [Float64](../../../sql-reference/data-types/float.md). **Пример** @@ -122,7 +122,7 @@ SELECT h3EdgeAngle(10) AS edgeAngle; ## h3EdgeLengthM {#h3edgelengthm} -Рассчитывает средний размер стороны шестигранника [H3](#h3index) в метрах. +Рассчитывает средний размер стороны шестиугольника [H3](#h3index) в метрах. **Синтаксис** @@ -136,7 +136,7 @@ h3EdgeLengthM(resolution) **Возвращаемое значение** -- Средняя длина стороны шестигранника H3 в метрах, тип — [Float64](../../../sql-reference/data-types/float.md). +- Средняя длина стороны шестиугольника H3 в метрах, тип — [Float64](../../../sql-reference/data-types/float.md). **Пример** @@ -172,7 +172,7 @@ geoToH3(lon, lat, resolution) **Возвращаемые значения** -- Порядковый номер шестигранника. +- Порядковый номер шестиугольника. - 0 в случае ошибки. Тип данных: [UInt64](../../../sql-reference/data-types/int-uint.md). @@ -195,7 +195,7 @@ SELECT geoToH3(37.79506683, 55.71290588, 15) AS h3Index; ## h3ToGeo {#h3togeo} -Возвращает географические координаты долготы и широты, соответствующие указанному [H3](#h3index)-индексу. +Возвращает географические координаты долготы и широты центра шестиугольника, соответствующие указанному [H3](#h3index)-индексу. **Синтаксис** @@ -265,7 +265,7 @@ SELECT h3ToGeoBoundary(644325524701193974) AS coordinates; ## h3kRing {#h3kring} -Возвращает [H3](#h3index)-индексы шестигранников в радиусе `k` от данного в произвольном порядке. +Возвращает [H3](#h3index)-индексы шестиугольника в радиусе `k` от данного в произвольном порядке. **Синтаксис** @@ -275,7 +275,7 @@ h3kRing(h3index, k) **Аргументы** -- `h3index` — идентификатор шестигранника. Тип данных: [UInt64](../../../sql-reference/data-types/int-uint.md). +- `h3index` — идентификатор шестиугольника. Тип данных: [UInt64](../../../sql-reference/data-types/int-uint.md). - `k` — радиус. Тип данных: [целое число](../../../sql-reference/data-types/int-uint.md) **Возвращаемые значения** @@ -607,7 +607,7 @@ h3IsResClassIII(index) **Параметр** -- `index` — порядковый номер шестигранника. Тип: [UInt64](../../../sql-reference/data-types/int-uint.md). +- `index` — порядковый номер шестиугольника. Тип: [UInt64](../../../sql-reference/data-types/int-uint.md). **Возвращаемые значения** @@ -644,7 +644,7 @@ h3IsPentagon(index) **Параметр** -- `index` — порядковый номер шестигранника. Тип: [UInt64](../../../sql-reference/data-types/int-uint.md). +- `index` — порядковый номер шестиугольника. Тип: [UInt64](../../../sql-reference/data-types/int-uint.md). 
**Возвращаемые значения** diff --git a/docs/ru/sql-reference/functions/logical-functions.md b/docs/ru/sql-reference/functions/logical-functions.md index 6ba55dca30f..ac4e226b2d2 100644 --- a/docs/ru/sql-reference/functions/logical-functions.md +++ b/docs/ru/sql-reference/functions/logical-functions.md @@ -70,7 +70,7 @@ SELECT and(NULL, 1, 10, -2); **Синтаксис** ``` sql -and(val1, val2...) +or(val1, val2...) ``` Чтобы вычислять функцию `or` по короткой схеме, используйте настройку [short_circuit_function_evaluation](../../operations/settings/settings.md#short-circuit-function-evaluation). Если настройка включена, то выражение `vali` вычисляется только для строк, где условие `((NOT val1) AND (NOT val2) AND ... AND (NOT val{i-1}))` верно. Например, при выполнении запроса `SELECT or(number = 0, intDiv(1, number) != 0) FROM numbers(10)` не будет сгенерировано исключение из-за деления на ноль. diff --git a/docs/tools/cmake_in_clickhouse_generator.py b/docs/tools/cmake_in_clickhouse_generator.py index e66915d4a39..aa4cbbddd18 100644 --- a/docs/tools/cmake_in_clickhouse_generator.py +++ b/docs/tools/cmake_in_clickhouse_generator.py @@ -39,11 +39,6 @@ def build_entity(path: str, entity: Entity, line_comment: Tuple[int, str]) -> No if name in entities: return - # cannot escape the { in macro option description -> invalid AMP html - # Skipping "USE_INTERNAL_${LIB_NAME_UC}_LIBRARY" - if "LIB_NAME_UC" in name: - return - if len(default) == 0: formatted_default: str = "`OFF`" elif default[0] == "$": @@ -140,13 +135,6 @@ def generate_cmake_flags_files() -> None: f.write(entities[k][1] + "\n") ignored_keys.append(k) - f.write("\n\n### External libraries system/bundled mode\n" + table_header) - - for k in sorted_keys: - if k.startswith("USE_INTERNAL_"): - f.write(entities[k][1] + "\n") - ignored_keys.append(k) - f.write("\n\n### Other flags\n" + table_header) for k in sorted(set(sorted_keys).difference(set(ignored_keys))): diff --git a/docs/zh/faq/general/columnar-database.md b/docs/zh/faq/general/columnar-database.md deleted file mode 120000 index b7557b62010..00000000000 --- a/docs/zh/faq/general/columnar-database.md +++ /dev/null @@ -1 +0,0 @@ -../../../en/faq/general/columnar-database.md \ No newline at end of file diff --git a/docs/zh/faq/general/columnar-database.md b/docs/zh/faq/general/columnar-database.md new file mode 100644 index 00000000000..185deaa7406 --- /dev/null +++ b/docs/zh/faq/general/columnar-database.md @@ -0,0 +1,25 @@ +--- +title: 什么是列存储数据库? +toc_hidden: true +toc_priority: 101 +--- + +# 什么是列存储数据库? {#what-is-a-columnar-database} + +列存储数据库独立存储每个列的数据。这只允许从磁盘读取任何给定查询中使用的列的数据。其代价是,影响整行的操作会按比例变得更昂贵。列存储数据库的同义词是面向列的数据库管理系统。ClickHouse就是这样一个典型的例子。 + +列存储数据库的主要优点是: + +- 查询只使用许多列其中的少数列。 +— 聚合对大量数据的查询。 +— 按列压缩。 + +下面是构建报表时传统的面向行系统和柱状数据库之间的区别: + +**传统行存储** +!(传统行存储)(https://clickhouse.com/docs/en/images/row-oriented.gif) + +**列存储** +!(列存储)(https://clickhouse.com/docs/en/images/column-oriented.gif) + +列存储数据库是分析应用程序的首选,因为它允许在一个表中有许多列以防万一,但不会在读取查询执行时为未使用的列付出代价。面向列的数据库是为大数据处理而设计的,因为和数据仓库一样,它们通常使用分布式的低成本硬件集群来提高吞吐量。ClickHouse结合了[分布式](../../engines/table-engines/special/distributed.md)和[复制式](../../engines/table-engines/mergetree-family/replication.md)两类表。 \ No newline at end of file diff --git a/docs/zh/faq/index.md b/docs/zh/faq/index.md index a44dbb31e89..dd29d73a013 100644 --- a/docs/zh/faq/index.md +++ b/docs/zh/faq/index.md @@ -1,8 +1,46 @@ --- -machine_translated: true -machine_translated_rev: 72537a2d527c63c07aa5d2361a8829f3895cf2bd toc_folder_title: F.A.Q. 
+toc_hidden: true toc_priority: 76 --- +# ClickHouse 问答 F.A.Q {#clickhouse-f-a-q} +本节文档是一个收集经常出现的ClickHouse相关问题的答案的地方。 + +类别: + +- **[常见问题](../faq/general/index.md)** + - [什么是 ClickHouse?](../index.md#what-is-clickhouse) + - [为何 ClickHouse 如此迅捷?](../faq/general/why-clickhouse-is-so-fast.md) + - [谁在使用 ClickHouse?](../faq/general/who-is-using-clickhouse.md) + - [“ClickHouse” 有什么含义?](../faq/general/dbms-naming.md) + - [ “Не тормозит” 有什么含义?](../faq/general/ne-tormozit.md) + - [什么是 OLAP?](../faq/general/olap.md) + - [什么是列存储数据库?](../faq/general/columnar-database.md) + - [为何不使用 MapReduce等技术?](../faq/general/mapreduce.md) +- **[应用案例](../faq/use-cases/index.md)** + - [我能把 ClickHouse 作为时序数据库来使用吗?](../faq/use-cases/time-series.md) + - [我能把 ClickHouse 作为 key-value 键值存储吗?](../faq/use-cases/key-value.md) +- **[运维操作](../faq/operations/index.md)** + - [如果想在生产环境部署,需要用哪个版本的 ClickHouse 呢?](../faq/operations/production.md) + - [是否可能从 ClickHouse 数据表中删除所有旧的数据记录?](../faq/operations/delete-old-data.md) +- **[集成开发](../faq/integration/index.md)** + - [如何从 ClickHouse 导出数据到一个文件?](../faq/integration/file-export.md) + - [如果我用ODBC链接Oracle数据库出现编码问题该怎么办?](../faq/integration/oracle-odbc.md) + +{## TODO +Question candidates: +- How to choose a primary key? +- How to add a column in ClickHouse? +- Too many parts +- How to filter ClickHouse table by an array column contents? +- How to insert all rows from one table to another of identical structure? +- How to kill a process (query) in ClickHouse? +- How to implement pivot (like in pandas)? +- How to remove the default ClickHouse user through users.d? +- Importing MySQL dump to ClickHouse +- Window function workarounds (row_number, lag/lead, running diff/sum/average) +##} + +{## [原始文档](https://clickhouse.com/docs/en/faq) ##} diff --git a/docs/zh/faq/use-cases/index.md b/docs/zh/faq/use-cases/index.md deleted file mode 120000 index cc545acb000..00000000000 --- a/docs/zh/faq/use-cases/index.md +++ /dev/null @@ -1 +0,0 @@ -../../../en/faq/use-cases/index.md \ No newline at end of file diff --git a/docs/zh/faq/use-cases/index.md b/docs/zh/faq/use-cases/index.md new file mode 100644 index 00000000000..cfd3270e3f2 --- /dev/null +++ b/docs/zh/faq/use-cases/index.md @@ -0,0 +1,18 @@ +--- +title: 关于ClickHouse使用案例的问题 +toc_hidden_folder: true +toc_priority: 2 +toc_title: 使用案例 +--- + +# 关于ClickHouse使用案例的问题 {#questions-about-clickhouse-use-cases} + +问题: + +- [我能把 ClickHouse 当做时序数据库来使用吗?](../../faq/use-cases/time-series.md) +- [我能把 ClickHouse 当做Key-value 键值存储来使用吗?](../../faq/use-cases/key-value.md) + +!!! info "没找到您所需要的内容?" + 请查看[其他常见问题类别](../../faq/index.md)或浏览左侧边栏中的主要文档文章。 + +{## [原始文档](https://clickhouse.com/docs/en/faq/use-cases/) ##} diff --git a/docs/zh/faq/use-cases/key-value.md b/docs/zh/faq/use-cases/key-value.md deleted file mode 120000 index 63140458d12..00000000000 --- a/docs/zh/faq/use-cases/key-value.md +++ /dev/null @@ -1 +0,0 @@ -../../../en/faq/use-cases/key-value.md \ No newline at end of file diff --git a/docs/zh/faq/use-cases/key-value.md b/docs/zh/faq/use-cases/key-value.md new file mode 100644 index 00000000000..ae47a9a8b25 --- /dev/null +++ b/docs/zh/faq/use-cases/key-value.md @@ -0,0 +1,16 @@ +--- +title: 我能把 ClickHouse 当做Key-value 键值存储来使用吗? +toc_hidden: true +toc_priority: 101 +--- +# 我能把 ClickHouse 当做Key-value 键值存储来使用吗? {#can-i-use-clickhouse-as-a-key-value-storage}. 
+ +简短的回答是 **不能** 。关键值的工作量是在列表中的最高位置时,**不能**{.text-danger}使用ClickHouse的情况。它是一个[OLAP](../../faq/general/olap.md)系统,毕竟有很多优秀的键值存储系统在那里。 + +然而,可能在某些情况下,使用ClickHouse进行类似键值的查询仍然是有意义的。通常,是一些低预算的产品,主要的工作负载是分析性的,很适合ClickHouse,但也有一些次要的过程需要一个键值模式,请求吞吐量不是很高,没有严格的延迟要求。如果你有无限的预算,你会为这样的次要工作负载安装一个次要的键值数据库,但实际上,多维护一个存储系统(监控、备份等)会有额外的成本,这可能是值得避免的。 + +如果你决定违背建议,对ClickHouse运行一些类似键值的查询,这里有一些提示。 + +- ClickHouse中点查询昂贵的关键原因是其稀疏的主索引[MergeTree表引擎家族](../../engines/table-engines/mergetree-family/mergetree.md)。这个索引不能指向每一行具体的数据,相反,它指向每N行,系统必须从邻近的N行扫描到所需的行,沿途读取过多的数据。在一个键值场景中,通过`index_granularity`的设置来减少N的值可能是有用的。 +- ClickHouse将每一列保存在一组单独的文件中,所以要组装一个完整的行,它需要通过这些文件中的每一个。它们的数量随着列数的增加而线性增加,所以在键值场景中,可能值得避免使用许多列,并将所有的有效数据放在一个单一的`String`列中,并以某种序列化格式(如JSON、Protobuf或任何有效的格式)进行编码。 +- 还有一种方法,使用[Join](../../engines/table-engines/special/join.md)表引擎代替正常的`MergeTree`表和[joinGet](../../sql-reference/functions/other-functions.md#joinget) 函数来检索数据。它可以提供更好的查询性能,但可能有一些可用性和可靠性问题。下面是一个[使用实例](https://github.com/ClickHouse/ClickHouse/blob/master/tests/queries/0_stateless/00800_versatile_storage_join.sql#L49-L51)。 diff --git a/docs/zh/guides/apply-catboost-model.md b/docs/zh/guides/apply-catboost-model.md index 72f5fa38e84..adc5b48eb55 100644 --- a/docs/zh/guides/apply-catboost-model.md +++ b/docs/zh/guides/apply-catboost-model.md @@ -1,6 +1,4 @@ --- -machine_translated: true -machine_translated_rev: 72537a2d527c63c07aa5d2361a8829f3895cf2bd toc_priority: 41 toc_title: "\u5E94\u7528CatBoost\u6A21\u578B" --- @@ -10,10 +8,10 @@ toc_title: "\u5E94\u7528CatBoost\u6A21\u578B" [CatBoost](https://catboost.ai) 是一个由[Yandex](https://yandex.com/company/)开发的开源免费机器学习库。 -通过这篇指导,您将学会如何用SQL建模,使用ClickHouse预先训练好的模型来推断数据。 +通过本篇文档,您将学会如何用SQL语句调用已经存放在Clickhouse中的预训练模型来预测数据。 -在ClickHouse中应用CatBoost模型的一般过程: +为了在ClickHouse中应用CatBoost模型,需要进行如下步骤: 1. [创建数据表](#create-table). 2. [将数据插入到表中](#insert-data-to-table). @@ -22,24 +20,26 @@ toc_title: "\u5E94\u7528CatBoost\u6A21\u578B" 有关训练CatBoost模型的详细信息,请参阅 [训练和模型应用](https://catboost.ai/docs/features/training.html#training). +您可以通过[RELOAD MODEL](https://clickhouse.com/docs/en/sql-reference/statements/system/#query_language-system-reload-model)与[RELOAD MODELS](https://clickhouse.com/docs/en/sql-reference/statements/system/#query_language-system-reload-models)语句来重载CatBoost模型。 + ## 先决条件 {#prerequisites} 请先安装 [Docker](https://docs.docker.com/install/)。 !!! note "注" - [Docker](https://www.docker.com) 是一个软件平台,用户可以用来创建独立于其余系统、集成CatBoost和ClickHouse的容器。 + [Docker](https://www.docker.com) 是一个软件平台,用户可以用Docker来创建独立于已有系统并集成了CatBoost和ClickHouse的容器。 在应用CatBoost模型之前: -**1.** 从容器仓库拉取docker映像 (https://hub.docker.com/r/yandex/tutorial-catboost-clickhouse) : +**1.** 从容器仓库拉取示例docker镜像 (https://hub.docker.com/r/yandex/tutorial-catboost-clickhouse) : ``` bash $ docker pull yandex/tutorial-catboost-clickhouse ``` -此Docker映像包含运行CatBoost和ClickHouse所需的所有内容:代码、运行环境、库、环境变量和配置文件。 +此示例Docker镜像包含运行CatBoost和ClickHouse所需的所有内容:代码、运行时、库、环境变量和配置文件。 -**2.** 确保已成功拉取Docker映像: +**2.** 确保已成功拉取Docker镜像: ``` bash $ docker image ls @@ -47,7 +47,7 @@ REPOSITORY TAG IMAGE ID CR yandex/tutorial-catboost-clickhouse latest 622e4d17945b 22 hours ago 1.37GB ``` -**3.** 基于此映像启动一个Docker容器: +**3.** 基于此镜像启动一个Docker容器: ``` bash $ docker run -it -p 8888:8888 yandex/tutorial-catboost-clickhouse @@ -124,9 +124,9 @@ FROM amazon_train ## 3. 将CatBoost集成到ClickHouse中 {#integrate-catboost-into-clickhouse} !!! 
note "注" - **可跳过。** Docker映像包含运行CatBoost和ClickHouse所需的所有内容。 + **可跳过。** 示例Docker映像已经包含了运行CatBoost和ClickHouse所需的所有内容。 -CatBoost集成到ClickHouse步骤: +为了将CatBoost集成进ClickHouse,需要进行如下步骤: **1.** 构建评估库。 @@ -134,13 +134,13 @@ CatBoost集成到ClickHouse步骤: 有关如何构建库文件的详细信息,请参阅 [CatBoost文件](https://catboost.ai/docs/concepts/c-plus-plus-api_dynamic-c-pluplus-wrapper.html). -**2.** 创建一个新目录(位置与名称可随意指定), 如 `data` 并将创建的库文件放入其中。 Docker映像已经包含了库 `data/libcatboostmodel.so`. +**2.** 创建一个新目录(位置与名称可随意指定), 如 `data` 并将创建的库文件放入其中。 示例Docker镜像已经包含了库 `data/libcatboostmodel.so`. **3.** 创建一个新目录来放配置模型, 如 `models`. **4.** 创建一个模型配置文件,如 `models/amazon_model.xml`. -**5.** 描述模型配置: +**5.** 修改模型配置: ``` xml @@ -165,9 +165,9 @@ CatBoost集成到ClickHouse步骤: /home/catboost/models/*_model.xml ``` -## 4. 运行从SQL推断的模型 {#run-model-inference} +## 4. 使用SQL调用预测模型 {#run-model-inference} -测试模型是否正常,运行ClickHouse客户端 `$ clickhouse client`. +为了测试模型是否正常,可以使用ClickHouse客户端 `$ clickhouse client`. 让我们确保模型能正常工作: @@ -189,7 +189,7 @@ LIMIT 10 ``` !!! note "注" - 函数 [modelEvaluate](../sql-reference/functions/other-functions.md#function-modelevaluate) 返回带有多类模型的每类原始预测的元组。 + 函数 [modelEvaluate](../sql-reference/functions/other-functions.md#function-modelevaluate) 会对多类别模型返回一个元组,其中包含每一类别的原始预测值。 执行预测: diff --git a/docs/zh/operations/settings/settings-users.md b/docs/zh/operations/settings/settings-users.md index ae75dddab58..d89b880328a 100644 --- a/docs/zh/operations/settings/settings-users.md +++ b/docs/zh/operations/settings/settings-users.md @@ -1,5 +1,5 @@ --- -machine_translated: true +machine_translated: false machine_translated_rev: 72537a2d527c63c07aa5d2361a8829f3895cf2bd toc_priority: 63 toc_title: "\u7528\u6237\u8BBE\u7F6E" @@ -7,12 +7,12 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E" # 用户设置 {#user-settings} -该 `users` 一节 `user.xml` 配置文件包含用户设置。 +`user.xml` 中的 `users` 配置段包含了用户配置 -!!! note "信息" +!!! note "提示" ClickHouse还支持 [SQL驱动的工作流](../access-rights.md#access-control) 用于管理用户。 我们建议使用它。 -的结构 `users` 科: +`users` 配置段的结构: ``` xml @@ -43,21 +43,21 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E" ``` -### 用户名称/密码 {#user-namepassword} +### user_name/password {#user-namepassword} 密码可以以明文或SHA256(十六进制格式)指定。 -- 以明文形式分配密码 (**不推荐**),把它放在一个 `password` 元素。 +- 以明文形式分配密码 (**不推荐**),把它放在一个 `password` 配置段中。 例如, `qwerty`. 密码可以留空。 -- 要使用其SHA256散列分配密码,请将其放置在 `password_sha256_hex` 元素。 +- 要使用SHA256加密后的密码,请将其放置在 `password_sha256_hex` 配置段。 例如, `65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5`. - 如何从shell生成密码的示例: + 从shell生成加密密码的示例: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha256sum | tr -d '-' @@ -65,19 +65,19 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E" -- 为了与MySQL客户端兼容,密码可以在双SHA1哈希中指定。 放进去 `password_double_sha1_hex` 元素。 +- 为了与MySQL客户端兼容,密码可以设置为双SHA1哈希加密, 请将其放置在 `password_double_sha1_hex` 配置段。 例如, `08b4a0f1de6ad37da17359e592c8d74788a83eb0`. - 如何从shell生成密码的示例: + 从shell生成密码的示例: PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-' 结果的第一行是密码。 第二行是相应的双SHA1哈希。 -### 访问管理 {#access_management-user-setting} +### access_management {#access_management-user-setting} -此设置启用禁用使用SQL驱动 [访问控制和帐户管理](../access-rights.md#access-control) 对于用户。 +此设置可为用户启用或禁用 SQL-driven [访问控制和帐户管理](../access-rights.md#access-control) 。 可能的值: @@ -86,42 +86,42 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E" 默认值:0。 -### 用户名称/网络 {#user-namenetworks} +### user_name/networks {#user-namenetworks} -用户可以从中连接到ClickHouse服务器的网络列表。 +用户访问来源列表 列表中的每个元素都可以具有以下形式之一: -- `` — IP address or network mask. 
+- `` — IP地址或网络掩码 例: `213.180.204.3`, `10.0.0.1/8`, `10.0.0.1/255.255.255.0`, `2a02:6b8::3`, `2a02:6b8::3/64`, `2a02:6b8::3/ffff:ffff:ffff:ffff::`. -- `` — Hostname. +- `` — 域名 示例: `example01.host.ru`. - 要检查访问,将执行DNS查询,并将所有返回的IP地址与对等地址进行比较。 + 为检查访问,将执行DNS查询,并将所有返回的IP地址与对端地址进行比较。 -- `` — Regular expression for hostnames. +- `` — 域名的正则表达式. 示例, `^example\d\d-\d\d-\d\.host\.ru$` - 要检查访问,a [DNS PTR查询](https://en.wikipedia.org/wiki/Reverse_DNS_lookup) 对对等体地址执行,然后应用指定的正则表达式。 然后,对PTR查询的结果执行另一个DNS查询,并将所有接收到的地址与对等地址进行比较。 我们强烈建议正则表达式以$结尾。 + 为检查访问,[DNS PTR查询](https://en.wikipedia.org/wiki/Reverse_DNS_lookup) 对对端地址执行,然后应用指定的正则表达式。 然后,以PTR查询的结果执行另一个DNS查询,并将所有接收到的地址与对端地址进行比较. 我们强烈建议正则表达式以$结尾. DNS请求的所有结果都将被缓存,直到服务器重新启动。 **例** -要从任何网络打开用户的访问权限,请指定: +要开启任意来源网络的访问, 请指定: ``` xml ::/0 ``` !!! warning "警告" - 从任何网络开放访问是不安全的,除非你有一个防火墙正确配置或服务器没有直接连接到互联网。 + 从任何网络开放访问是不安全的,除非你有一个正确配置的防火墙, 或者服务器没有直接连接到互联网。 -若要仅从本地主机打开访问权限,请指定: +若要限定本机访问, 请指定: ``` xml ::1 @@ -130,22 +130,21 @@ DNS请求的所有结果都将被缓存,直到服务器重新启动。 ### user_name/profile {#user-nameprofile} -您可以为用户分配设置配置文件。 设置配置文件在单独的部分配置 `users.xml` 文件 有关详细信息,请参阅 [设置配置文件](settings-profiles.md). +您可以为用户分配设置配置文件。 设置配置文件在`users.xml` 中有单独的配置段. 有关详细信息,请参阅 [设置配置文件](settings-profiles.md). -### 用户名称/配额 {#user-namequota} +### user_name/quota {#user-namequota} -配额允许您在一段时间内跟踪或限制资源使用情况。 配额在配置 `quotas` -一节 `users.xml` 配置文件。 +配额允许您在一段时间内跟踪或限制资源使用情况。 配额在`users.xml` 中的 `quotas` 配置段下. 您可以为用户分配配额。 有关配额配置的详细说明,请参阅 [配额](../quotas.md#quotas). -### 用户名/数据库 {#user-namedatabases} +### user_name/databases {#user-namedatabases} -在本节中,您可以限制ClickHouse返回的行 `SELECT` 由当前用户进行的查询,从而实现基本的行级安全性。 +在本配置段中,您可以限制ClickHouse中由当前用户进行的 `SELECT` 查询所返回的行,从而实现基本的行级安全性。 **示例** -以下配置强制该用户 `user1` 只能看到的行 `table1` 作为结果 `SELECT` 查询,其中的值 `id` 场是1000。 +以下配置使用户 `user1` 通过SELECT查询只能得到table1中id为1000的行 ``` xml @@ -159,6 +158,6 @@ DNS请求的所有结果都将被缓存,直到服务器重新启动。 ``` -该 `filter` 可以是导致任何表达式 [UInt8](../../sql-reference/data-types/int-uint.md)-键入值。 它通常包含比较和逻辑运算符。 从行 `database_name.table1` 其中,不会为此用户返回为0的筛选结果。 过滤是不兼容的 `PREWHERE` 操作和禁用 `WHERE→PREWHERE` 优化。 +该 `filter` 可以是[UInt8](../../sql-reference/data-types/int-uint.md)编码的任何表达式。 它通常包含比较和逻辑运算符, 当filter返回0时, database_name.table1 的该行结果将不会返回给用户.过滤不兼容 `PREWHERE` 操作并禁用 `WHERE→PREWHERE` 优化。 [原始文章](https://clickhouse.com/docs/en/operations/settings/settings_users/) diff --git a/docs/zh/sql-reference/statements/alter/ttl.md b/docs/zh/sql-reference/statements/alter/ttl.md deleted file mode 120000 index 94a112e7a17..00000000000 --- a/docs/zh/sql-reference/statements/alter/ttl.md +++ /dev/null @@ -1 +0,0 @@ -../../../../en/sql-reference/statements/alter/ttl.md \ No newline at end of file diff --git a/docs/zh/sql-reference/statements/alter/ttl.md b/docs/zh/sql-reference/statements/alter/ttl.md new file mode 100644 index 00000000000..ca011a2a12f --- /dev/null +++ b/docs/zh/sql-reference/statements/alter/ttl.md @@ -0,0 +1,85 @@ +--- +toc_priority: 44 +toc_title: TTL +--- + +# 表的 TTL 操作 {#manipulations-with-table-ttl} + +## 修改 MODIFY TTL {#modify-ttl} + +你能修改 [表 TTL](../../../engines/table-engines/mergetree-family/mergetree.md#mergetree-table-ttl) ,命令语法如下所示: + +``` sql +ALTER TABLE table_name MODIFY TTL ttl_expression; +``` + +## 移除 REMOVE TTL {#remove-ttl} + +TTL 属性可以用下列命令从表中移除: + +```sql +ALTER TABLE table_name REMOVE TTL +``` + +**示例** + +创建一个表,带有 `TTL` 属性如下所示: + +```sql +CREATE TABLE table_with_ttl +( + event_time DateTime, + UserID UInt64, + Comment String +) +ENGINE MergeTree() +ORDER BY tuple() +TTL event_time + INTERVAL 3 MONTH; +SETTINGS min_bytes_for_wide_part = 0; + +INSERT INTO table_with_ttl VALUES 
(now(), 1, 'username1'); + +INSERT INTO table_with_ttl VALUES (now() - INTERVAL 4 MONTH, 2, 'username2'); +``` + +运行命令 `OPTIMIZE` 强制清理 `TTL`: + +```sql +OPTIMIZE TABLE table_with_ttl FINAL; +SELECT * FROM table_with_ttl FORMAT PrettyCompact; +``` +第二行记录被从表中删除掉了. + +```text +┌─────────event_time────┬──UserID─┬─────Comment──┐ +│ 2020-12-11 12:44:57 │ 1 │ username1 │ +└───────────────────────┴─────────┴──────────────┘ +``` + +现在用下面的命令,把表的 `TTL` 移除掉: + +```sql +ALTER TABLE table_with_ttl REMOVE TTL; +``` + +重新插入上面的数据,并尝试再次运行 `OPTIMIZE` 命令清理 `TTL` 属性 : + +```sql +INSERT INTO table_with_ttl VALUES (now() - INTERVAL 4 MONTH, 2, 'username2'); +OPTIMIZE TABLE table_with_ttl FINAL; +SELECT * FROM table_with_ttl FORMAT PrettyCompact; +``` + +可以看得到 `TTL` 这个属性已经没了,并且可以看得到第二行记录并没有被删除: + +```text +┌─────────event_time────┬──UserID─┬─────Comment──┐ +│ 2020-12-11 12:44:57 │ 1 │ username1 │ +│ 2020-08-11 12:44:57 │ 2 │ username2 │ +└───────────────────────┴─────────┴──────────────┘ +``` + +**更多参考** + +- 关于 [TTL 表达式](../../../sql-reference/statements/create/table.md#ttl-expression). +- 修改列 [with TTL](../../../sql-reference/statements/alter/column.md#alter_modify-column). diff --git a/docs/zh/sql-reference/statements/alter/update.md b/docs/zh/sql-reference/statements/alter/update.md deleted file mode 120000 index fa9be21c070..00000000000 --- a/docs/zh/sql-reference/statements/alter/update.md +++ /dev/null @@ -1 +0,0 @@ -../../../../en/sql-reference/statements/alter/update.md \ No newline at end of file diff --git a/docs/zh/sql-reference/statements/alter/update.md b/docs/zh/sql-reference/statements/alter/update.md new file mode 100644 index 00000000000..08eccdf1aa2 --- /dev/null +++ b/docs/zh/sql-reference/statements/alter/update.md @@ -0,0 +1,29 @@ +--- +toc_priority: 40 +toc_title: UPDATE +--- + +# ALTER TABLE … UPDATE 语句 {#alter-table-update-statements} + +``` sql +ALTER TABLE [db.]table UPDATE column1 = expr1 [, ...] WHERE filter_expr +``` + +操作与指定过滤表达式相匹配的数据。作为一个[变更 mutation](../../../sql-reference/statements/alter/index.md#mutations)来实现. + +!!! note "Note" + `ALTER TABLE` 的前缀使这个语法与其他大多数支持SQL的系统不同。它的目的是表明,与OLTP数据库中的类似查询不同,这是一个繁重的操作,不是为频繁使用而设计。 + +`filter_expr`必须是`UInt8`类型。这个查询将指定列的值更新为行中相应表达式的值,对于这些行,`filter_expr`取值为非零。使用`CAST`操作符将数值映射到列的类型上。不支持更新用于计算主键或分区键的列。 + +一个查询可以包含几个由逗号分隔的命令。 + +查询处理的同步性由 [mutations_sync](../../../operations/settings/settings.md#mutations_sync) 设置定义。 默认情况下,它是异步操作。 + + +**更多详情请参阅** + +- [变更 Mutations](../../../sql-reference/statements/alter/index.md#mutations) +- [ALTER查询的同步性问题](../../../sql-reference/statements/alter/index.md#synchronicity-of-alter-queries) +- [mutations_sync](../../../operations/settings/settings.md#mutations_sync) setting + diff --git a/programs/CMakeLists.txt b/programs/CMakeLists.txt index 4806a7fe46e..8906d186bfc 100644 --- a/programs/CMakeLists.txt +++ b/programs/CMakeLists.txt @@ -53,9 +53,9 @@ option (ENABLE_CLICKHOUSE_KEEPER "ClickHouse alternative to ZooKeeper" ${ENABLE_ option (ENABLE_CLICKHOUSE_KEEPER_CONVERTER "Util allows to convert ZooKeeper logs and snapshots into clickhouse-keeper snapshot" ${ENABLE_CLICKHOUSE_ALL}) -if (NOT USE_NURAFT) +if (NOT ENABLE_NURAFT) # RECONFIGURE_MESSAGE_LEVEL should not be used here, - # since USE_NURAFT is set to OFF for FreeBSD and Darwin. + # since ENABLE_NURAFT is set to OFF for FreeBSD and Darwin. 
message (STATUS "clickhouse-keeper and clickhouse-keeper-converter will not be built (lack of NuRaft)") set(ENABLE_CLICKHOUSE_KEEPER OFF) set(ENABLE_CLICKHOUSE_KEEPER_CONVERTER OFF) @@ -160,7 +160,7 @@ else() message(STATUS "ClickHouse keeper-converter mode: OFF") endif() -if(NOT (MAKE_STATIC_LIBRARIES OR SPLIT_SHARED_LIBRARIES)) +if(NOT (USE_STATIC_LIBRARIES OR SPLIT_SHARED_LIBRARIES)) set(CLICKHOUSE_ONE_SHARED ON) endif() @@ -468,7 +468,7 @@ else () endif() endif () -if (ENABLE_TESTS AND USE_GTEST) +if (ENABLE_TESTS) set (CLICKHOUSE_UNIT_TESTS_TARGETS unit_tests_dbms) add_custom_target (clickhouse-tests ALL DEPENDS ${CLICKHOUSE_UNIT_TESTS_TARGETS}) add_dependencies(clickhouse-bundle clickhouse-tests) diff --git a/programs/local/LocalServer.cpp b/programs/local/LocalServer.cpp index a294857ace8..ce39f22e978 100644 --- a/programs/local/LocalServer.cpp +++ b/programs/local/LocalServer.cpp @@ -37,6 +37,7 @@ #include #include #include +#include #include #include #include @@ -319,9 +320,9 @@ std::string LocalServer::getInitialCreateTableQuery() auto table_name = backQuoteIfNeed(config().getString("table-name", "table")); auto table_structure = config().getString("table-structure", "auto"); - auto data_format = backQuoteIfNeed(config().getString("table-data-format", "TSV")); String table_file; + String format_from_file_name; if (!config().has("table-file") || config().getString("table-file") == "-") { /// Use Unix tools stdin naming convention @@ -330,9 +331,14 @@ std::string LocalServer::getInitialCreateTableQuery() else { /// Use regular file - table_file = quoteString(config().getString("table-file")); + auto file_name = config().getString("table-file"); + table_file = quoteString(file_name); + format_from_file_name = FormatFactory::instance().getFormatFromFileName(file_name, false); } + auto data_format + = backQuoteIfNeed(config().getString("table-data-format", format_from_file_name.empty() ? "TSV" : format_from_file_name)); + if (table_structure == "auto") table_structure = ""; else diff --git a/programs/odbc-bridge/CMakeLists.txt b/programs/odbc-bridge/CMakeLists.txt index 7b232f2b5dc..54f47204259 100644 --- a/programs/odbc-bridge/CMakeLists.txt +++ b/programs/odbc-bridge/CMakeLists.txt @@ -26,8 +26,8 @@ target_link_libraries(clickhouse-odbc-bridge PRIVATE dbms bridge clickhouse_parsers - nanodbc - unixodbc + ch_contrib::nanodbc + ch_contrib::unixodbc ) set_target_properties(clickhouse-odbc-bridge PROPERTIES RUNTIME_OUTPUT_DIRECTORY ..) 
diff --git a/programs/odbc-bridge/ColumnInfoHandler.h b/programs/odbc-bridge/ColumnInfoHandler.h index bc976f54aee..76c0103d604 100644 --- a/programs/odbc-bridge/ColumnInfoHandler.h +++ b/programs/odbc-bridge/ColumnInfoHandler.h @@ -1,11 +1,12 @@ #pragma once +#include + #if USE_ODBC #include #include #include -#include #include diff --git a/programs/odbc-bridge/HandlerFactory.cpp b/programs/odbc-bridge/HandlerFactory.cpp index 6a5ef89ab8b..1a6df287a5c 100644 --- a/programs/odbc-bridge/HandlerFactory.cpp +++ b/programs/odbc-bridge/HandlerFactory.cpp @@ -1,6 +1,7 @@ #include "HandlerFactory.h" #include "PingHandler.h" #include "ColumnInfoHandler.h" +#include #include #include #include diff --git a/programs/odbc-bridge/IdentifierQuoteHandler.h b/programs/odbc-bridge/IdentifierQuoteHandler.h index ef3806fd802..23ffd84663b 100644 --- a/programs/odbc-bridge/IdentifierQuoteHandler.h +++ b/programs/odbc-bridge/IdentifierQuoteHandler.h @@ -2,7 +2,7 @@ #include #include - +#include #include #if USE_ODBC diff --git a/programs/odbc-bridge/MainHandler.cpp b/programs/odbc-bridge/MainHandler.cpp index 82d1bd61c24..1252d1ae70a 100644 --- a/programs/odbc-bridge/MainHandler.cpp +++ b/programs/odbc-bridge/MainHandler.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #include #include diff --git a/programs/odbc-bridge/SchemaAllowedHandler.h b/programs/odbc-bridge/SchemaAllowedHandler.h index d7b922ed05b..7afa77ca091 100644 --- a/programs/odbc-bridge/SchemaAllowedHandler.h +++ b/programs/odbc-bridge/SchemaAllowedHandler.h @@ -2,6 +2,7 @@ #include #include +#include #include #if USE_ODBC diff --git a/programs/odbc-bridge/getIdentifierQuote.h b/programs/odbc-bridge/getIdentifierQuote.h index f4227af5c07..a7620da2291 100644 --- a/programs/odbc-bridge/getIdentifierQuote.h +++ b/programs/odbc-bridge/getIdentifierQuote.h @@ -1,5 +1,7 @@ #pragma once +#include + #if USE_ODBC #include diff --git a/programs/server/CMakeLists.txt b/programs/server/CMakeLists.txt index 281c25d50eb..643fd2f0ec4 100644 --- a/programs/server/CMakeLists.txt +++ b/programs/server/CMakeLists.txt @@ -18,13 +18,15 @@ set (CLICKHOUSE_SERVER_LINK clickhouse_storages_system clickhouse_table_functions string_utils - jemalloc ${LINK_RESOURCE_LIB} PUBLIC daemon ) +if (TARGET ch_contrib::jemalloc) + list(APPEND CLICKHOUSE_SERVER_LINK PRIVATE ch_contrib::jemalloc) +endif() clickhouse_program_add(server) diff --git a/programs/server/Server.cpp b/programs/server/Server.cpp index 5fc3f9aa967..a49ccc79b63 100644 --- a/programs/server/Server.cpp +++ b/programs/server/Server.cpp @@ -98,9 +98,7 @@ #endif #if USE_SSL -# if USE_INTERNAL_SSL_LIBRARY -# include -# endif +# include # include # include #endif @@ -114,10 +112,6 @@ # include #endif -#if USE_BASE64 -# include -#endif - #if USE_JEMALLOC # include #endif diff --git a/src/Access/ExternalAuthenticators.cpp b/src/Access/ExternalAuthenticators.cpp index d4100c4e520..3e1c289b207 100644 --- a/src/Access/ExternalAuthenticators.cpp +++ b/src/Access/ExternalAuthenticators.cpp @@ -270,12 +270,21 @@ void ExternalAuthenticators::setConfiguration(const Poco::Util::AbstractConfigur Poco::Util::AbstractConfiguration::Keys ldap_server_names; config.keys("ldap_servers", ldap_server_names); - for (const auto & ldap_server_name : ldap_server_names) + ldap_client_params_blueprint.clear(); + for (auto ldap_server_name : ldap_server_names) { try { - ldap_client_params_blueprint.erase(ldap_server_name); - parseLDAPServer(ldap_client_params_blueprint.emplace(ldap_server_name, LDAPClient::Params{}).first->second, 
config, ldap_server_name); + const auto bracket_pos = ldap_server_name.find('['); + if (bracket_pos != std::string::npos) + ldap_server_name.resize(bracket_pos); + + if (ldap_client_params_blueprint.count(ldap_server_name) > 0) + throw Exception("Multiple LDAP servers with the same name are not allowed", ErrorCodes::BAD_ARGUMENTS); + + LDAPClient::Params ldap_client_params_tmp; + parseLDAPServer(ldap_client_params_tmp, config, ldap_server_name); + ldap_client_params_blueprint.emplace(std::move(ldap_server_name), std::move(ldap_client_params_tmp)); } catch (...) { @@ -283,10 +292,15 @@ void ExternalAuthenticators::setConfiguration(const Poco::Util::AbstractConfigur } } + kerberos_params.reset(); try { if (kerberos_keys_count > 0) - parseKerberosParams(kerberos_params.emplace(), config); + { + GSSAcceptorContext::Params kerberos_params_tmp; + parseKerberosParams(kerberos_params_tmp, config); + kerberos_params = std::move(kerberos_params_tmp); + } } catch (...) { diff --git a/src/AggregateFunctions/AggregateFunctionFactory.cpp b/src/AggregateFunctions/AggregateFunctionFactory.cpp index eac761c1a82..347f4607dbf 100644 --- a/src/AggregateFunctions/AggregateFunctionFactory.cpp +++ b/src/AggregateFunctions/AggregateFunctionFactory.cpp @@ -70,11 +70,11 @@ static DataTypes convertLowCardinalityTypesToNested(const DataTypes & types) AggregateFunctionPtr AggregateFunctionFactory::get( const String & name, const DataTypes & argument_types, const Array & parameters, AggregateFunctionProperties & out_properties) const { - auto type_without_low_cardinality = convertLowCardinalityTypesToNested(argument_types); + auto types_without_low_cardinality = convertLowCardinalityTypesToNested(argument_types); /// If one of the types is Nullable, we apply aggregate function combinator "Null". - if (std::any_of(type_without_low_cardinality.begin(), type_without_low_cardinality.end(), + if (std::any_of(types_without_low_cardinality.begin(), types_without_low_cardinality.end(), [](const auto & type) { return type->isNullable(); })) { AggregateFunctionCombinatorPtr combinator = AggregateFunctionCombinatorFactory::instance().tryFindSuffix("Null"); @@ -82,10 +82,10 @@ AggregateFunctionPtr AggregateFunctionFactory::get( throw Exception("Logical error: cannot find aggregate function combinator to apply a function to Nullable arguments.", ErrorCodes::LOGICAL_ERROR); - DataTypes nested_types = combinator->transformArguments(type_without_low_cardinality); + DataTypes nested_types = combinator->transformArguments(types_without_low_cardinality); Array nested_parameters = combinator->transformParameters(parameters); - bool has_null_arguments = std::any_of(type_without_low_cardinality.begin(), type_without_low_cardinality.end(), + bool has_null_arguments = std::any_of(types_without_low_cardinality.begin(), types_without_low_cardinality.end(), [](const auto & type) { return type->onlyNull(); }); AggregateFunctionPtr nested_function = getImpl( @@ -97,13 +97,10 @@ AggregateFunctionPtr AggregateFunctionFactory::get( // that are rewritten to AggregateFunctionNothing, in this case // nested_function is nullptr. 
if (!nested_function || !nested_function->isOnlyWindowFunction()) - { - return combinator->transformAggregateFunction(nested_function, - out_properties, type_without_low_cardinality, parameters); - } + return combinator->transformAggregateFunction(nested_function, out_properties, types_without_low_cardinality, parameters); } - auto with_original_arguments = getImpl(name, type_without_low_cardinality, parameters, out_properties, false); + auto with_original_arguments = getImpl(name, types_without_low_cardinality, parameters, out_properties, false); if (!with_original_arguments) throw Exception("Logical error: AggregateFunctionFactory returned nullptr", ErrorCodes::LOGICAL_ERROR); diff --git a/src/AggregateFunctions/AggregateFunctionIf.cpp b/src/AggregateFunctions/AggregateFunctionIf.cpp index d752900c018..5ba54ff8505 100644 --- a/src/AggregateFunctions/AggregateFunctionIf.cpp +++ b/src/AggregateFunctions/AggregateFunctionIf.cpp @@ -40,28 +40,6 @@ public: } }; -/** Given an array of flags, checks if it's all zeros - * When the buffer is all zeros, this is slightly faster than doing a memcmp since doesn't require allocating memory - * When the buffer has values, this is much faster since it avoids visiting all memory (and the allocation and function calls) - */ -static bool ALWAYS_INLINE inline is_all_zeros(const UInt8 * flags, size_t size) -{ - size_t unroll_size = size - size % 8; - size_t i = 0; - while (i < unroll_size) - { - UInt64 v = *reinterpret_cast(&flags[i]); - if (v) - return false; - i += 8; - } - - for (; i < size; ++i) - if (flags[i]) - return false; - - return true; -} /** There are two cases: for single argument and variadic. * Code for single argument is much more efficient. @@ -73,6 +51,7 @@ class AggregateFunctionIfNullUnary final { private: size_t num_arguments; + bool filter_is_nullable = false; /// The name of the nested function, including combinators (i.e. 
*If) /// @@ -92,8 +71,26 @@ private: using Base = AggregateFunctionNullBase>; -public: + inline bool singleFilter(const IColumn ** columns, size_t row_num, size_t num_arguments) const + { + const IColumn * filter_column = columns[num_arguments - 1]; + + if (filter_is_nullable) + { + const ColumnNullable * nullable_column = assert_cast(filter_column); + filter_column = nullable_column->getNestedColumnPtr().get(); + const UInt8 * filter_null_map = nullable_column->getNullMapData().data(); + + return assert_cast(*filter_column).getData()[row_num] && !filter_null_map[row_num]; + } + else + { + return assert_cast(*filter_column).getData()[row_num]; + } + } + +public: String getName() const override { return name; @@ -105,17 +102,10 @@ public: , name(name_) { if (num_arguments == 0) - throw Exception("Aggregate function " + getName() + " require at least one argument", - ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH); - } + throw Exception(ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH, + "Aggregate function {} require at least one argument", getName()); - static inline bool singleFilter(const IColumn ** columns, size_t row_num, size_t num_arguments) - { - const IColumn * filter_column = columns[num_arguments - 1]; - if (const ColumnNullable * nullable_column = typeid_cast(filter_column)) - filter_column = nullable_column->getNestedColumnPtr().get(); - - return assert_cast(*filter_column).getData()[row_num]; + filter_is_nullable = arguments[num_arguments - 1]->isNullable(); } void add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena * arena) const override @@ -136,29 +126,41 @@ public: const IColumn * columns_param[] = {&column->getNestedColumn()}; const IColumn * filter_column = columns[num_arguments - 1]; - if (const ColumnNullable * nullable_column = typeid_cast(filter_column)) - filter_column = nullable_column->getNestedColumnPtr().get(); - if constexpr (result_is_nullable) + + const UInt8 * filter_values = nullptr; + const UInt8 * filter_null_map = nullptr; + + if (filter_is_nullable) { - /// We need to check if there is work to do as otherwise setting the flag would be a mistake, - /// it would mean that the return value would be the default value of the nested type instead of NULL - if (is_all_zeros(assert_cast(filter_column)->getData().data(), batch_size)) - return; + const ColumnNullable * nullable_column = assert_cast(filter_column); + filter_column = nullable_column->getNestedColumnPtr().get(); + filter_null_map = nullable_column->getNullMapData().data(); } + filter_values = assert_cast(filter_column)->getData().data(); + /// Combine the 2 flag arrays so we can call a simplified version (one check vs 2) /// Note that now the null map will contain 0 if not null and not filtered, or 1 for null or filtered (or both) - const auto * filter_flags = assert_cast(filter_column)->getData().data(); + auto final_nulls = std::make_unique(batch_size); - for (size_t i = 0; i < batch_size; ++i) - final_nulls[i] = (!!null_map[i]) | (!filter_flags[i]); + + if (filter_null_map) + for (size_t i = 0; i < batch_size; ++i) + final_nulls[i] = (!!null_map[i]) | (!filter_values[i]) | (!!filter_null_map[i]); + else + for (size_t i = 0; i < batch_size; ++i) + final_nulls[i] = (!!null_map[i]) | (!filter_values[i]); + + if constexpr (result_is_nullable) + { + if (!memoryIsByte(final_nulls.get(), batch_size, 1)) + this->setFlag(place); + else + return; /// No work to do. 
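+                    /// The early return above intentionally skips setFlag(): when every row is NULL or
+                    /// filtered out, the state must stay NULL instead of the nested type's default value.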
+ } this->nested_function->addBatchSinglePlaceNotNull( batch_size, this->nestedPlace(place), columns_param, final_nulls.get(), arena, -1); - - if constexpr (result_is_nullable) - if (!memoryIsByte(null_map, batch_size, 1)) - this->setFlag(place); } #if USE_EMBEDDED_COMPILER @@ -367,10 +369,14 @@ AggregateFunctionPtr AggregateFunctionIf::getOwnNullAdapter( const AggregateFunctionPtr & nested_function, const DataTypes & arguments, const Array & params, const AggregateFunctionProperties & properties) const { - bool return_type_is_nullable = !properties.returns_default_when_only_null && getReturnType()->canBeInsideNullable(); - size_t nullable_size = std::count_if(arguments.begin(), arguments.end(), [](const auto & element) { return element->isNullable(); }); - return_type_is_nullable &= nullable_size != 1 || !arguments.back()->isNullable(); /// If only condition is nullable. we should non-nullable type. - bool serialize_flag = return_type_is_nullable || properties.returns_default_when_only_null; + assert(!arguments.empty()); + + /// Nullability of the last argument (condition) does not affect the nullability of the result (NULL is processed as false). + /// For other arguments it is as usual (at least one is NULL then the result is NULL if possible). + bool return_type_is_nullable = !properties.returns_default_when_only_null && getReturnType()->canBeInsideNullable() + && std::any_of(arguments.begin(), arguments.end() - 1, [](const auto & element) { return element->isNullable(); }); + + bool need_to_serialize_flag = return_type_is_nullable || properties.returns_default_when_only_null; if (arguments.size() <= 2 && arguments.front()->isNullable()) { @@ -380,7 +386,7 @@ AggregateFunctionPtr AggregateFunctionIf::getOwnNullAdapter( } else { - if (serialize_flag) + if (need_to_serialize_flag) return std::make_shared>(nested_function->getName(), nested_func, arguments, params); else return std::make_shared>(nested_function->getName(), nested_func, arguments, params); @@ -394,7 +400,7 @@ AggregateFunctionPtr AggregateFunctionIf::getOwnNullAdapter( } else { - if (serialize_flag) + if (need_to_serialize_flag) return std::make_shared>(nested_function, arguments, params); else return std::make_shared>(nested_function, arguments, params); diff --git a/src/AggregateFunctions/AggregateFunctionMeanZTest.cpp b/src/AggregateFunctions/AggregateFunctionMeanZTest.cpp new file mode 100644 index 00000000000..edc4361bce3 --- /dev/null +++ b/src/AggregateFunctions/AggregateFunctionMeanZTest.cpp @@ -0,0 +1,64 @@ +#include +#include +#include +#include + + +namespace ErrorCodes +{ + extern const int BAD_ARGUMENTS; + extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH; +} + + +namespace DB +{ +struct Settings; + +namespace +{ + +struct MeanZTestData : public ZTestMoments +{ + static constexpr auto name = "meanZTest"; + + std::pair getResult(Float64 pop_var_x, Float64 pop_var_y) const + { + Float64 mean_x = getMeanX(); + Float64 mean_y = getMeanY(); + + /// z = \frac{\bar{X_{1}} - \bar{X_{2}}}{\sqrt{\frac{\sigma_{1}^{2}}{n_{1}} + \frac{\sigma_{2}^{2}}{n_{2}}}} + Float64 zstat = (mean_x - mean_y) / getStandardError(pop_var_x, pop_var_y); + if (!std::isfinite(zstat)) + { + return {std::numeric_limits::quiet_NaN(), std::numeric_limits::quiet_NaN()}; + } + + Float64 pvalue = 2.0 * boost::math::cdf(boost::math::normal(0.0, 1.0), -1.0 * std::abs(zstat)); + + return {zstat, pvalue}; + } +}; + +AggregateFunctionPtr createAggregateFunctionMeanZTest( + const std::string & name, const DataTypes & argument_types, const Array & 
parameters, const Settings *) +{ + assertBinary(name, argument_types); + + if (parameters.size() != 3) + throw Exception("Aggregate function " + name + " requires three parameter.", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH); + + if (!isNumber(argument_types[0]) || !isNumber(argument_types[1])) + throw Exception("Aggregate function " + name + " only supports numerical types", ErrorCodes::BAD_ARGUMENTS); + + return std::make_shared>(argument_types, parameters); +} + +} + +void registerAggregateFunctionMeanZTest(AggregateFunctionFactory & factory) +{ + factory.registerFunction("meanZTest", createAggregateFunctionMeanZTest); +} + +} diff --git a/src/AggregateFunctions/AggregateFunctionMeanZTest.h b/src/AggregateFunctions/AggregateFunctionMeanZTest.h new file mode 100644 index 00000000000..e4be2503d87 --- /dev/null +++ b/src/AggregateFunctions/AggregateFunctionMeanZTest.h @@ -0,0 +1,139 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace DB +{ +struct Settings; + +class ReadBuffer; +class WriteBuffer; + +namespace ErrorCodes +{ + extern const int BAD_ARGUMENTS; +} + + +/// Returns tuple of (z-statistic, p-value, confidence-interval-low, confidence-interval-high) +template +class AggregateFunctionMeanZTest : + public IAggregateFunctionDataHelper> +{ +private: + Float64 pop_var_x; + Float64 pop_var_y; + Float64 confidence_level; + +public: + AggregateFunctionMeanZTest(const DataTypes & arguments, const Array & params) + : IAggregateFunctionDataHelper>({arguments}, params) + { + pop_var_x = params.at(0).safeGet(); + pop_var_y = params.at(1).safeGet(); + confidence_level = params.at(2).safeGet(); + + if (!std::isfinite(pop_var_x) || !std::isfinite(pop_var_y) || !std::isfinite(confidence_level)) + { + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Aggregate function {} requires finite parameter values.", Data::name); + } + + if (pop_var_x < 0.0 || pop_var_y < 0.0) + { + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Population variance parameters must be larger than or equal to zero in aggregate function {}.", Data::name); + } + + if (confidence_level <= 0.0 || confidence_level >= 1.0) + { + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Confidence level parameter must be between 0 and 1 in aggregate function {}.", Data::name); + } + } + + String getName() const override + { + return Data::name; + } + + DataTypePtr getReturnType() const override + { + DataTypes types + { + std::make_shared>(), + std::make_shared>(), + std::make_shared>(), + std::make_shared>(), + }; + + Strings names + { + "z_statistic", + "p_value", + "confidence_interval_low", + "confidence_interval_high" + }; + + return std::make_shared( + std::move(types), + std::move(names) + ); + } + + bool allocatesMemoryInArena() const override { return false; } + + void add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena *) const override + { + Float64 value = columns[0]->getFloat64(row_num); + UInt8 is_second = columns[1]->getUInt(row_num); + + if (is_second) + this->data(place).addY(value); + else + this->data(place).addX(value); + } + + void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override + { + this->data(place).merge(this->data(rhs)); + } + + void serialize(ConstAggregateDataPtr __restrict place, WriteBuffer & buf, std::optional /* version */) const override + { + this->data(place).write(buf); + } + + void deserialize(AggregateDataPtr __restrict place, ReadBuffer & buf, std::optional /* 
version */, Arena *) const override + { + this->data(place).read(buf); + } + + void insertResultInto(AggregateDataPtr __restrict place, IColumn & to, Arena *) const override + { + auto [z_stat, p_value] = this->data(place).getResult(pop_var_x, pop_var_y); + auto [ci_low, ci_high] = this->data(place).getConfidenceIntervals(pop_var_x, pop_var_y, confidence_level); + + /// Because p-value is a probability. + p_value = std::min(1.0, std::max(0.0, p_value)); + + auto & column_tuple = assert_cast(to); + auto & column_stat = assert_cast &>(column_tuple.getColumn(0)); + auto & column_value = assert_cast &>(column_tuple.getColumn(1)); + auto & column_ci_low = assert_cast &>(column_tuple.getColumn(2)); + auto & column_ci_high = assert_cast &>(column_tuple.getColumn(3)); + + column_stat.getData().push_back(z_stat); + column_value.getData().push_back(p_value); + column_ci_low.getData().push_back(ci_low); + column_ci_high.getData().push_back(ci_high); + } +}; + +}; diff --git a/src/AggregateFunctions/CMakeLists.txt b/src/AggregateFunctions/CMakeLists.txt index 64f6eed9a6c..0cb38fc729a 100644 --- a/src/AggregateFunctions/CMakeLists.txt +++ b/src/AggregateFunctions/CMakeLists.txt @@ -23,7 +23,7 @@ list(REMOVE_ITEM clickhouse_aggregate_functions_headers ) add_library(clickhouse_aggregate_functions ${clickhouse_aggregate_functions_sources}) -target_link_libraries(clickhouse_aggregate_functions PRIVATE dbms PUBLIC ${CITYHASH_LIBRARIES}) +target_link_libraries(clickhouse_aggregate_functions PRIVATE dbms PUBLIC ch_contrib::cityhash) if(ENABLE_EXAMPLES) add_subdirectory(examples) diff --git a/src/AggregateFunctions/Moments.h b/src/AggregateFunctions/Moments.h index 6f51e76607f..d2a6b0b5581 100644 --- a/src/AggregateFunctions/Moments.h +++ b/src/AggregateFunctions/Moments.h @@ -2,6 +2,7 @@ #include #include +#include namespace DB @@ -359,4 +360,74 @@ struct TTestMoments } }; +template +struct ZTestMoments +{ + T nx{}; + T ny{}; + T x1{}; + T y1{}; + + void addX(T value) + { + ++nx; + x1 += value; + } + + void addY(T value) + { + ++ny; + y1 += value; + } + + void merge(const ZTestMoments & rhs) + { + nx += rhs.nx; + ny += rhs.ny; + x1 += rhs.x1; + y1 += rhs.y1; + } + + void write(WriteBuffer & buf) const + { + writePODBinary(*this, buf); + } + + void read(ReadBuffer & buf) + { + readPODBinary(*this, buf); + } + + Float64 getMeanX() const + { + return x1 / nx; + } + + Float64 getMeanY() const + { + return y1 / ny; + } + + Float64 getStandardError(Float64 pop_var_x, Float64 pop_var_y) const + { + /// \sqrt{\frac{\sigma_{1}^{2}}{n_{1}} + \frac{\sigma_{2}^{2}}{n_{2}}} + return std::sqrt(pop_var_x / nx + pop_var_y / ny); + } + + std::pair getConfidenceIntervals(Float64 pop_var_x, Float64 pop_var_y, Float64 confidence_level) const + { + /// (\bar{x_{1}} - \bar{x_{2}}) \pm zscore \times \sqrt{\frac{\sigma_{1}^{2}}{n_{1}} + \frac{\sigma_{2}^{2}}{n_{2}}} + Float64 mean_x = getMeanX(); + Float64 mean_y = getMeanY(); + + Float64 z = boost::math::quantile(boost::math::complement( + boost::math::normal(0.0f, 1.0f), (1.0f - confidence_level) / 2.0f)); + Float64 se = getStandardError(pop_var_x, pop_var_y); + Float64 ci_low = (mean_x - mean_y) - z * se; + Float64 ci_high = (mean_x - mean_y) + z * se; + + return {ci_low, ci_high}; + } +}; + } diff --git a/src/AggregateFunctions/registerAggregateFunctions.cpp b/src/AggregateFunctions/registerAggregateFunctions.cpp index 33f6a532224..351adac31bb 100644 --- a/src/AggregateFunctions/registerAggregateFunctions.cpp +++ b/src/AggregateFunctions/registerAggregateFunctions.cpp @@ -48,6 
+48,7 @@ void registerAggregateFunctionRankCorrelation(AggregateFunctionFactory &); void registerAggregateFunctionMannWhitney(AggregateFunctionFactory &); void registerAggregateFunctionWelchTTest(AggregateFunctionFactory &); void registerAggregateFunctionStudentTTest(AggregateFunctionFactory &); +void registerAggregateFunctionMeanZTest(AggregateFunctionFactory &); void registerAggregateFunctionCramersV(AggregateFunctionFactory &); void registerAggregateFunctionTheilsU(AggregateFunctionFactory &); void registerAggregateFunctionContingency(AggregateFunctionFactory &); @@ -123,6 +124,7 @@ void registerAggregateFunctions() registerAggregateFunctionSequenceNextNode(factory); registerAggregateFunctionWelchTTest(factory); registerAggregateFunctionStudentTTest(factory); + registerAggregateFunctionMeanZTest(factory); registerAggregateFunctionNothing(factory); registerAggregateFunctionSingleValueOrNull(factory); registerAggregateFunctionIntervalLengthSum(factory); diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index a2a4764b398..57d4bf29491 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -11,7 +11,7 @@ if(COMPILER_PIPE) else() set(MAX_COMPILER_MEMORY 1500) endif() -if(MAKE_STATIC_LIBRARIES) +if(USE_STATIC_LIBRARIES) set(MAX_LINKER_MEMORY 3500) else() set(MAX_LINKER_MEMORY 2500) @@ -23,12 +23,13 @@ set (CONFIG_COMMON "${CMAKE_CURRENT_BINARY_DIR}/Common/config.h") include (../cmake/version.cmake) message (STATUS "Will build ${VERSION_FULL} revision ${VERSION_REVISION} ${VERSION_OFFICIAL}") +include (configure_config.cmake) configure_file (Common/config.h.in ${CONFIG_COMMON}) configure_file (Common/config_version.h.in ${CONFIG_VERSION}) configure_file (Core/config_core.h.in "${CMAKE_CURRENT_BINARY_DIR}/Core/include/config_core.h") if (USE_DEBUG_HELPERS) - get_target_property(MAGIC_ENUM_INCLUDE_DIR magic_enum INTERFACE_INCLUDE_DIRECTORIES) + get_target_property(MAGIC_ENUM_INCLUDE_DIR ch_contrib::magic_enum INTERFACE_INCLUDE_DIRECTORIES) # CMake generator expression will do insane quoting when it encounters special character like quotes, spaces, etc. # Prefixing "SHELL:" will force it to use the original text. 
set (INCLUDE_DEBUG_HELPERS "SHELL:-I\"${ClickHouse_SOURCE_DIR}/base\" -I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/src/Core/iostream_debug_helpers.h\"") @@ -82,15 +83,15 @@ add_headers_and_sources(clickhouse_common_io IO/S3) list (REMOVE_ITEM clickhouse_common_io_sources Common/malloc.cpp Common/new_delete.cpp) add_headers_and_sources(dbms Disks/IO) -if (USE_SQLITE) +if (TARGET ch_contrib::sqlite) add_headers_and_sources(dbms Databases/SQLite) endif() -if(USE_RDKAFKA) +if (TARGET ch_contrib::rdkafka) add_headers_and_sources(dbms Storages/Kafka) endif() -if (USE_AMQPCPP) +if (TARGET ch_contrib::amqp_cpp) add_headers_and_sources(dbms Storages/RabbitMQ) endif() @@ -100,32 +101,34 @@ if (USE_LIBPQXX) add_headers_and_sources(dbms Storages/PostgreSQL) endif() -if (USE_ROCKSDB) +if (TARGET ch_contrib::rocksdb) add_headers_and_sources(dbms Storages/RocksDB) endif() -if (USE_AWS_S3) +if (TARGET ch_contrib::aws_s3) add_headers_and_sources(dbms Common/S3) add_headers_and_sources(dbms Disks/S3) endif() -if (USE_AZURE_BLOB_STORAGE) +if (TARGET ch_contrib::azure_sdk) add_headers_and_sources(dbms Disks/AzureBlobStorage) endif() -if (USE_HDFS) +if (TARGET ch_contrib::hdfs) add_headers_and_sources(dbms Storages/HDFS) add_headers_and_sources(dbms Disks/HDFS) endif() add_headers_and_sources(dbms Storages/Cache) -if (USE_HIVE) +if (TARGET ch_contrib::hivemetastore) add_headers_and_sources(dbms Storages/Hive) endif() -if(USE_FILELOG) +if (OS_LINUX) add_headers_and_sources(dbms Storages/FileLog) -endif() +else() + message(STATUS "StorageFileLog is only supported on Linux") +endif () list (APPEND clickhouse_common_io_sources ${CONFIG_BUILD}) list (APPEND clickhouse_common_io_headers ${CONFIG_VERSION} ${CONFIG_COMMON}) @@ -174,10 +177,15 @@ if (((SANITIZE STREQUAL "thread") OR (SANITIZE STREQUAL "address")) AND COMPILER message(WARNING "Memory tracking is disabled, due to gcc sanitizers") else() add_library (clickhouse_new_delete STATIC Common/new_delete.cpp) - target_link_libraries (clickhouse_new_delete PRIVATE clickhouse_common_io jemalloc) + target_link_libraries (clickhouse_new_delete PRIVATE clickhouse_common_io) + if (TARGET ch_contrib::jemalloc) + target_link_libraries (clickhouse_new_delete PRIVATE ch_contrib::jemalloc) + endif() endif() -target_link_libraries (clickhouse_common_io PRIVATE jemalloc) +if (TARGET ch_contrib::jemalloc) + target_link_libraries (clickhouse_common_io PRIVATE ch_contrib::jemalloc) +endif() add_subdirectory(Access/Common) add_subdirectory(Common/ZooKeeper) @@ -185,7 +193,7 @@ add_subdirectory(Common/Config) set (all_modules) macro(add_object_library name common_path) - if (MAKE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES) + if (USE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES) add_headers_and_sources(dbms ${common_path}) else () list (APPEND all_modules ${name}) @@ -240,25 +248,26 @@ add_object_library(clickhouse_processors_merges_algorithms Processors/Merges/Alg add_object_library(clickhouse_processors_queryplan Processors/QueryPlan) add_object_library(clickhouse_processors_queryplan_optimizations Processors/QueryPlan/Optimizations) -if (USE_NURAFT) +if (TARGET ch_contrib::nuraft) add_object_library(clickhouse_coordination Coordination) endif() set (DBMS_COMMON_LIBRARIES) -# libgcc_s does not provide an implementation of an atomics library. Instead, -# GCC’s libatomic library can be used to supply these when using libgcc_s. 
-if ((NOT USE_LIBCXX) AND COMPILER_CLANG AND OS_LINUX) - list (APPEND DBMS_COMMON_LIBRARIES atomic) -endif() -if (MAKE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES) +if (USE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES) add_library (dbms STATIC ${dbms_headers} ${dbms_sources}) - target_link_libraries (dbms PRIVATE jemalloc libdivide ${DBMS_COMMON_LIBRARIES}) + target_link_libraries (dbms PRIVATE ch_contrib::libdivide ${DBMS_COMMON_LIBRARIES}) + if (TARGET ch_contrib::jemalloc) + target_link_libraries (dbms PRIVATE ch_contrib::jemalloc) + endif() set (all_modules dbms) else() add_library (dbms SHARED ${dbms_headers} ${dbms_sources}) target_link_libraries (dbms PUBLIC ${all_modules} ${DBMS_COMMON_LIBRARIES}) - target_link_libraries (clickhouse_interpreters PRIVATE jemalloc libdivide) + target_link_libraries (clickhouse_interpreters PRIVATE ch_contrib::libdivide) + if (TARGET ch_contrib::jemalloc) + target_link_libraries (clickhouse_interpreters PRIVATE ch_contrib::jemalloc) + endif() list (APPEND all_modules dbms) # force all split libs to be linked if (OS_DARWIN) @@ -283,9 +292,8 @@ endmacro () dbms_target_include_directories (PUBLIC "${ClickHouse_SOURCE_DIR}/src" "${ClickHouse_BINARY_DIR}/src") target_include_directories (clickhouse_common_io PUBLIC "${ClickHouse_SOURCE_DIR}/src" "${ClickHouse_BINARY_DIR}/src") -if (USE_EMBEDDED_COMPILER) - dbms_target_link_libraries (PUBLIC ${REQUIRED_LLVM_LIBRARIES}) - dbms_target_include_directories (SYSTEM BEFORE PUBLIC ${LLVM_INCLUDE_DIRS}) +if (TARGET ch_contrib::llvm) + dbms_target_link_libraries (PUBLIC ch_contrib::llvm) endif () # Otherwise it will slow down stack traces printing too much. @@ -302,8 +310,8 @@ target_link_libraries (clickhouse_common_io ${LINK_LIBRARIES_ONLY_ON_X86_64} PUBLIC common - ${DOUBLE_CONVERSION_LIBRARIES} - dragonbox_to_chars + ch_contrib::double_conversion + ch_contrib::dragonbox_to_chars ) # Use X86 AVX2/AVX512 instructions to accelerate filter operations @@ -312,54 +320,48 @@ set_source_files_properties( Columns/ColumnsCommon.cpp Columns/ColumnVector.cpp Columns/ColumnDecimal.cpp + Columns/ColumnString.cpp PROPERTIES COMPILE_FLAGS "${X86_INTRINSICS_FLAGS}") -if(RE2_LIBRARY) - target_link_libraries(clickhouse_common_io PUBLIC ${RE2_LIBRARY}) -endif() -if(RE2_ST_LIBRARY) - target_link_libraries(clickhouse_common_io PUBLIC ${RE2_ST_LIBRARY}) -endif() +target_link_libraries(clickhouse_common_io PUBLIC ch_contrib::re2_st) +target_link_libraries(clickhouse_common_io PUBLIC ch_contrib::re2) target_link_libraries(clickhouse_common_io PRIVATE ${EXECINFO_LIBRARIES} - cpuid PUBLIC boost::program_options boost::system - ${CITYHASH_LIBRARIES} - ${ZLIB_LIBRARIES} + ch_contrib::cityhash + ch_contrib::zlib pcg_random Poco::Foundation ) +if (TARGET ch_contrib::cpuid) + target_link_libraries(clickhouse_common_io PRIVATE ch_contrib::cpuid) +endif() + +dbms_target_link_libraries(PUBLIC ch_contrib::abseil_swiss_tables) + # Make dbms depend on roaring instead of clickhouse_common_io so that roaring itself can depend on clickhouse_common_io # That way we we can redirect malloc/free functions avoiding circular dependencies -dbms_target_link_libraries(PUBLIC roaring) +dbms_target_link_libraries(PUBLIC ch_contrib::roaring) -if (USE_RDKAFKA) - dbms_target_link_libraries(PRIVATE ${CPPKAFKA_LIBRARY} ${RDKAFKA_LIBRARY}) - if(NOT USE_INTERNAL_RDKAFKA_LIBRARY) - dbms_target_include_directories(SYSTEM BEFORE PRIVATE ${RDKAFKA_INCLUDE_DIR}) - endif() +if (TARGET ch_contrib::rdkafka) + dbms_target_link_libraries(PRIVATE ch_contrib::rdkafka 
ch_contrib::cppkafka) endif() -if (USE_CYRUS_SASL) - dbms_target_link_libraries(PRIVATE ${CYRUS_SASL_LIBRARY}) +if (TARGET ch_contrib::sasl2) + dbms_target_link_libraries(PRIVATE ch_contrib::sasl2) endif() -if (USE_KRB5) - dbms_target_include_directories(SYSTEM BEFORE PRIVATE ${KRB5_INCLUDE_DIR}) - dbms_target_link_libraries(PRIVATE ${KRB5_LIBRARY}) +if (TARGET ch_contrib::krb5) + dbms_target_link_libraries(PRIVATE ch_contrib::krb5) endif() -if (USE_NURAFT) - dbms_target_link_libraries(PUBLIC ${NURAFT_LIBRARY}) -endif() - -if(RE2_INCLUDE_DIR) - target_include_directories(clickhouse_common_io SYSTEM BEFORE PUBLIC ${RE2_INCLUDE_DIR}) +if (TARGET ch_contrib::nuraft) + dbms_target_link_libraries(PUBLIC ch_contrib::nuraft) endif() dbms_target_link_libraries ( @@ -370,16 +372,19 @@ dbms_target_link_libraries ( clickhouse_common_zookeeper clickhouse_dictionaries_embedded clickhouse_parsers - lz4 + ch_contrib::lz4 Poco::JSON Poco::MongoDB string_utils PUBLIC - ${MYSQLXX_LIBRARY} boost::system clickhouse_common_io ) +if (TARGET ch::mysqlxx) + dbms_target_link_libraries (PUBLIC ch::mysqlxx) +endif() + dbms_target_link_libraries ( PUBLIC boost::circular_buffer @@ -388,113 +393,86 @@ dbms_target_link_libraries ( target_include_directories(clickhouse_common_io PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/Core/include") # uses some includes from core dbms_target_include_directories(PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/Core/include") -target_include_directories(clickhouse_common_io SYSTEM BEFORE PUBLIC ${PDQSORT_INCLUDE_DIR}) -dbms_target_include_directories(SYSTEM BEFORE PUBLIC ${PDQSORT_INCLUDE_DIR}) -target_include_directories(clickhouse_common_io SYSTEM BEFORE PUBLIC ${MINISELECT_INCLUDE_DIR}) -dbms_target_include_directories(SYSTEM BEFORE PUBLIC ${MINISELECT_INCLUDE_DIR}) +target_link_libraries(clickhouse_common_io PUBLIC + ch_contrib::miniselect + ch_contrib::pdqsort) +dbms_target_link_libraries(PUBLIC + ch_contrib::miniselect + ch_contrib::pdqsort) -if (ZSTD_LIBRARY) - dbms_target_link_libraries(PRIVATE ${ZSTD_LIBRARY}) - target_link_libraries (clickhouse_common_io PUBLIC ${ZSTD_LIBRARY}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${ZSTD_INCLUDE_DIR}) - if (NOT USE_INTERNAL_ZSTD_LIBRARY AND ZSTD_INCLUDE_DIR) - dbms_target_include_directories(SYSTEM BEFORE PRIVATE ${ZSTD_INCLUDE_DIR}) - endif () -endif() +dbms_target_link_libraries(PRIVATE ch_contrib::zstd) +target_link_libraries (clickhouse_common_io PUBLIC ch_contrib::zstd) +target_link_libraries (clickhouse_common_io PUBLIC ch_contrib::xz) -if (XZ_LIBRARY) - target_link_libraries (clickhouse_common_io PUBLIC ${XZ_LIBRARY}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${XZ_INCLUDE_DIR}) -endif() - -if (USE_ICU) - dbms_target_link_libraries (PRIVATE ${ICU_LIBRARIES}) - dbms_target_include_directories (SYSTEM PRIVATE ${ICU_INCLUDE_DIRS}) +if (TARGET ch_contrib::icu) + dbms_target_link_libraries (PRIVATE ch_contrib::icu) endif () -if (USE_CAPNP) - dbms_target_link_libraries (PRIVATE ${CAPNP_LIBRARIES}) +if (TARGET ch_contrib::capnp) + dbms_target_link_libraries (PRIVATE ch_contrib::capnp) endif () -if (USE_PARQUET) - dbms_target_link_libraries(PRIVATE ${PARQUET_LIBRARY}) - if (NOT USE_INTERNAL_PARQUET_LIBRARY) - dbms_target_include_directories (SYSTEM BEFORE PRIVATE ${PARQUET_INCLUDE_DIR} ${ARROW_INCLUDE_DIR}) - if (USE_STATIC_LIBRARIES) - dbms_target_link_libraries(PRIVATE ${ARROW_LIBRARY}) - endif() - endif () +if (TARGET ch_contrib::parquet) + dbms_target_link_libraries (PRIVATE ch_contrib::parquet) 
endif () -if (USE_AVRO) - dbms_target_link_libraries(PRIVATE ${AVROCPP_LIBRARY}) - dbms_target_include_directories (SYSTEM BEFORE PRIVATE ${AVROCPP_INCLUDE_DIR}) +if (TARGET ch_contrib::avrocpp) + dbms_target_link_libraries(PRIVATE ch_contrib::avrocpp) endif () -if (OPENSSL_CRYPTO_LIBRARY) - dbms_target_link_libraries (PRIVATE ${OPENSSL_CRYPTO_LIBRARY}) - target_link_libraries (clickhouse_common_io PRIVATE ${OPENSSL_CRYPTO_LIBRARY}) +if (TARGET OpenSSL::Crypto) + dbms_target_link_libraries (PRIVATE OpenSSL::Crypto) + target_link_libraries (clickhouse_common_io PRIVATE OpenSSL::Crypto) endif () -if (USE_LDAP) - dbms_target_include_directories (SYSTEM BEFORE PRIVATE ${OPENLDAP_INCLUDE_DIRS}) - dbms_target_link_libraries (PRIVATE ${OPENLDAP_LIBRARIES}) +if (TARGET ch_contrib::ldap) + dbms_target_link_libraries (PRIVATE ch_contrib::ldap ch_contrib::lber) endif () -dbms_target_include_directories (SYSTEM BEFORE PRIVATE ${SPARSEHASH_INCLUDE_DIR}) +dbms_target_link_libraries (PRIVATE ch_contrib::sparsehash) -if (USE_PROTOBUF) - dbms_target_link_libraries (PRIVATE ${Protobuf_LIBRARY}) - dbms_target_include_directories (SYSTEM BEFORE PRIVATE ${Protobuf_INCLUDE_DIR}) +if (TARGET ch_contrib::protobuf) + dbms_target_link_libraries (PRIVATE ch_contrib::protobuf) endif () -if (USE_GRPC) +if (TARGET clickhouse_grpc_protos) dbms_target_link_libraries (PUBLIC clickhouse_grpc_protos) endif() -if (USE_HDFS) - dbms_target_link_libraries(PRIVATE ${HDFS3_LIBRARY}) - dbms_target_include_directories (SYSTEM BEFORE PUBLIC ${HDFS3_INCLUDE_DIR}) +if (TARGET ch_contrib::hdfs) + dbms_target_link_libraries(PRIVATE ch_contrib::hdfs) endif() -if (USE_HIVE) - dbms_target_link_libraries(PRIVATE hivemetastore) - dbms_target_include_directories(SYSTEM BEFORE PUBLIC ${ClickHouse_SOURCE_DIR}/contrib/hive-metastore) +if (TARGET ch_contrib::hivemetastore) + dbms_target_link_libraries(PRIVATE ch_contrib::hivemetastore) endif() -if (USE_AWS_S3) - target_link_libraries (clickhouse_common_io PUBLIC ${AWS_S3_LIBRARY}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${AWS_S3_CORE_INCLUDE_DIR}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${AWS_S3_INCLUDE_DIR}) +if (TARGET ch_contrib::aws_s3) + target_link_libraries (clickhouse_common_io PUBLIC ch_contrib::aws_s3) endif() -if (USE_AZURE_BLOB_STORAGE) - target_link_libraries (clickhouse_common_io PUBLIC ${AZURE_BLOB_STORAGE_LIBRARY}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${AZURE_SDK_INCLUDES}) +if (TARGET ch_contrib::azure_sdk) + target_link_libraries (clickhouse_common_io PUBLIC ch_contrib::azure_sdk) endif() -if (USE_S2_GEOMETRY) - dbms_target_link_libraries (PUBLIC ${S2_GEOMETRY_LIBRARY}) - dbms_target_include_directories (SYSTEM BEFORE PUBLIC ${S2_GEOMETRY_INCLUDE_DIR}) +if (TARGET ch_contrib::s2) + dbms_target_link_libraries (PUBLIC ch_contrib::s2) endif() -if (USE_BROTLI) - target_link_libraries (clickhouse_common_io PRIVATE ${BROTLI_LIBRARY}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PRIVATE ${BROTLI_INCLUDE_DIR}) +if (TARGET ch_contrib::brotli) + target_link_libraries (clickhouse_common_io PRIVATE ch_contrib::brotli) endif() -if (USE_SNAPPY) - target_link_libraries (clickhouse_common_io PUBLIC ${SNAPPY_LIBRARY}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${SNAPPY_INCLUDE_DIR}) +if (TARGET ch_contrib::snappy) + target_link_libraries (clickhouse_common_io PUBLIC ch_contrib::snappy) endif() -if (USE_AMQPCPP) - dbms_target_link_libraries(PUBLIC 
${AMQPCPP_LIBRARY}) - dbms_target_include_directories (SYSTEM BEFORE PUBLIC ${AMQPCPP_INCLUDE_DIR}) +if (TARGET ch_contrib::amqp_cpp) + dbms_target_link_libraries(PUBLIC ch_contrib::amqp_cpp) endif() -if (USE_SQLITE) - dbms_target_link_libraries(PUBLIC sqlite) +if (TARGET ch_contrib::sqlite) + dbms_target_link_libraries(PUBLIC ch_contrib::sqlite) endif() if (USE_CASSANDRA) @@ -502,63 +480,52 @@ if (USE_CASSANDRA) dbms_target_include_directories (SYSTEM BEFORE PUBLIC ${CASS_INCLUDE_DIR}) endif() -target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${DOUBLE_CONVERSION_INCLUDE_DIR}) - -if (USE_MSGPACK) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${MSGPACK_INCLUDE_DIR}) +if (TARGET ch_contrib::msgpack) + target_link_libraries (clickhouse_common_io PUBLIC ch_contrib::msgpack) endif() -target_link_libraries (clickhouse_common_io PUBLIC ${FAST_FLOAT_LIBRARY}) -target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${FAST_FLOAT_INCLUDE_DIR}) +target_link_libraries (clickhouse_common_io PUBLIC ch_contrib::fast_float) -if (USE_ORC) - dbms_target_link_libraries(PUBLIC ${ORC_LIBRARIES}) - dbms_target_include_directories(SYSTEM BEFORE PUBLIC ${ORC_INCLUDE_DIR} "${CMAKE_BINARY_DIR}/contrib/orc/c++/include") +if (TARGET ch_contrib::rocksdb) + dbms_target_link_libraries(PUBLIC ch_contrib::rocksdb) +endif() + +if (TARGET ch_contrib::libpqxx) + dbms_target_link_libraries(PUBLIC ch_contrib::libpqxx) +endif() + +if (TARGET ch_contrib::datasketches) + target_link_libraries (clickhouse_aggregate_functions PRIVATE ch_contrib::datasketches) endif () -if (USE_ROCKSDB) - dbms_target_link_libraries(PUBLIC ${ROCKSDB_LIBRARY}) - dbms_target_include_directories(SYSTEM BEFORE PUBLIC ${ROCKSDB_INCLUDE_DIR}) -endif() - -if (USE_LIBPQXX) - dbms_target_link_libraries(PUBLIC ${LIBPQXX_LIBRARY}) - dbms_target_include_directories(SYSTEM BEFORE PUBLIC ${LIBPQXX_INCLUDE_DIR}) -endif() - -if (USE_DATASKETCHES) - target_include_directories (clickhouse_aggregate_functions SYSTEM BEFORE PRIVATE ${DATASKETCHES_INCLUDE_DIR}) -endif () - -target_link_libraries (clickhouse_common_io PRIVATE lz4) +target_link_libraries (clickhouse_common_io PRIVATE ch_contrib::lz4) dbms_target_link_libraries(PRIVATE _boost_context) -if (USE_NLP) - dbms_target_link_libraries (PUBLIC stemmer) - dbms_target_link_libraries (PUBLIC wnb) - dbms_target_link_libraries (PUBLIC lemmagen) - dbms_target_link_libraries (PUBLIC nlp_data) +if (ENABLE_NLP) + dbms_target_link_libraries (PUBLIC ch_contrib::stemmer) + dbms_target_link_libraries (PUBLIC ch_contrib::wnb) + dbms_target_link_libraries (PUBLIC ch_contrib::lemmagen) + dbms_target_link_libraries (PUBLIC ch_contrib::nlp_data) endif() -if (USE_BZIP2) - target_link_libraries (clickhouse_common_io PRIVATE ${BZIP2_LIBRARY}) - target_include_directories (clickhouse_common_io SYSTEM BEFORE PRIVATE ${BZIP2_INCLUDE_DIR}) +if (TARGET ch_contrib::bzip2) + target_link_libraries (clickhouse_common_io PRIVATE ch_contrib::bzip2) endif() -if(USE_SIMDJSON) - dbms_target_link_libraries(PRIVATE simdjson) +if (TARGET ch_contrib::simdjson) + dbms_target_link_libraries(PRIVATE ch_contrib::simdjson) endif() -if(USE_RAPIDJSON) - dbms_target_include_directories(SYSTEM PRIVATE ${RAPIDJSON_INCLUDE_DIR}) +if (TARGET ch_contrib::rapidjson) + dbms_target_link_libraries(PRIVATE ch_contrib::rapidjson) endif() -dbms_target_link_libraries(PUBLIC consistent-hashing) +dbms_target_link_libraries(PUBLIC ch_contrib::consistent_hashing) include 
("${ClickHouse_SOURCE_DIR}/cmake/add_check.cmake") -if (ENABLE_TESTS AND USE_GTEST) +if (ENABLE_TESTS) macro (grep_gtest_sources BASE_DIR DST_VAR) # Cold match files that are not in tests/ directories file(GLOB_RECURSE "${DST_VAR}" RELATIVE "${BASE_DIR}" "gtest*.cpp") @@ -580,7 +547,7 @@ if (ENABLE_TESTS AND USE_GTEST) ) target_link_libraries(unit_tests_dbms PRIVATE - ${GTEST_BOTH_LIBRARIES} + ch_contrib::gtest_all clickhouse_functions clickhouse_aggregate_functions clickhouse_parsers diff --git a/src/Client/ClientBase.cpp b/src/Client/ClientBase.cpp index eb00ee349ee..0a30e924ffb 100644 --- a/src/Client/ClientBase.cpp +++ b/src/Client/ClientBase.cpp @@ -13,15 +13,16 @@ #include #include #include -#include "Common/Exception.h" -#include "Common/getNumberOfPhysicalCPUCores.h" -#include "Common/tests/gtest_global_context.h" -#include "Common/typeid_cast.h" -#include "Columns/ColumnString.h" -#include "Columns/ColumnsNumber.h" -#include "Core/Block.h" -#include "Core/Protocol.h" -#include "Formats/FormatFactory.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include #include #include @@ -1194,7 +1195,7 @@ bool ClientBase::receiveEndOfQuery() case Protocol::Server::Progress: onProgress(packet.progress); - return true; + break; default: throw NetException( diff --git a/src/Client/ClientBaseHelpers.h b/src/Client/ClientBaseHelpers.h index 048a4c17f10..3fb2863082a 100644 --- a/src/Client/ClientBaseHelpers.h +++ b/src/Client/ClientBaseHelpers.h @@ -1,6 +1,7 @@ #pragma once #include +#include #if USE_REPLXX # include diff --git a/src/Columns/ColumnDecimal.h b/src/Columns/ColumnDecimal.h index 18d4526e0f3..b55083cd671 100644 --- a/src/Columns/ColumnDecimal.h +++ b/src/Columns/ColumnDecimal.h @@ -87,7 +87,7 @@ private: public: const char * getFamilyName() const override { return TypeName.data(); } - TypeIndex getDataType() const override { return TypeId; } + TypeIndex getDataType() const override { return TypeToTypeIndex; } bool isNumeric() const override { return false; } bool canBeInsideNullable() const override { return true; } diff --git a/src/Columns/ColumnVector.h b/src/Columns/ColumnVector.h index bee7bfa738c..62a0e3a1190 100644 --- a/src/Columns/ColumnVector.h +++ b/src/Columns/ColumnVector.h @@ -238,7 +238,7 @@ public: } const char * getFamilyName() const override { return TypeName.data(); } - TypeIndex getDataType() const override { return TypeId; } + TypeIndex getDataType() const override { return TypeToTypeIndex; } MutableColumnPtr cloneResized(size_t size) const override; diff --git a/src/Common/CMakeLists.txt b/src/Common/CMakeLists.txt index 1e7d3591a48..490628a2180 100644 --- a/src/Common/CMakeLists.txt +++ b/src/Common/CMakeLists.txt @@ -4,6 +4,6 @@ if (ENABLE_EXAMPLES) add_subdirectory(examples) endif() -if (USE_MYSQL) +if (ENABLE_MYSQL) add_subdirectory (mysqlxx) endif () diff --git a/src/Common/Config/CMakeLists.txt b/src/Common/Config/CMakeLists.txt index cc41a8b2bb2..ec7bdd10196 100644 --- a/src/Common/Config/CMakeLists.txt +++ b/src/Common/Config/CMakeLists.txt @@ -27,7 +27,7 @@ target_link_libraries(clickhouse_common_config_no_zookeeper_log string_utils ) -if (USE_YAML_CPP) - target_link_libraries(clickhouse_common_config PRIVATE yaml-cpp) - target_link_libraries(clickhouse_common_config_no_zookeeper_log PRIVATE yaml-cpp) +if (TARGET ch_contrib::yaml_cpp) + target_link_libraries(clickhouse_common_config PRIVATE ch_contrib::yaml_cpp) + target_link_libraries(clickhouse_common_config_no_zookeeper_log PRIVATE ch_contrib::yaml_cpp) 
endif() diff --git a/src/Common/ErrorCodes.cpp b/src/Common/ErrorCodes.cpp index a22db080374..82714de3470 100644 --- a/src/Common/ErrorCodes.cpp +++ b/src/Common/ErrorCodes.cpp @@ -609,6 +609,7 @@ M(638, SNAPPY_UNCOMPRESS_FAILED) \ M(639, SNAPPY_COMPRESS_FAILED) \ M(640, NO_HIVEMETASTORE) \ + M(641, CANNOT_APPEND_TO_FILE) \ \ M(999, KEEPER_EXCEPTION) \ M(1000, POCO_EXCEPTION) \ diff --git a/src/Common/FieldVisitorConvertToNumber.h b/src/Common/FieldVisitorConvertToNumber.h index 025fd667609..7bbb7f0708a 100644 --- a/src/Common/FieldVisitorConvertToNumber.h +++ b/src/Common/FieldVisitorConvertToNumber.h @@ -123,6 +123,8 @@ public: else return static_cast(x); } + + T operator() (const bool & x) const { return T(x); } }; } diff --git a/src/Common/FieldVisitorDump.cpp b/src/Common/FieldVisitorDump.cpp index d0203407900..6c869e05fd4 100644 --- a/src/Common/FieldVisitorDump.cpp +++ b/src/Common/FieldVisitorDump.cpp @@ -37,6 +37,7 @@ String FieldVisitorDump::operator() (const UInt256 & x) const { return formatQuo String FieldVisitorDump::operator() (const Int128 & x) const { return formatQuotedWithPrefix(x, "Int128_"); } String FieldVisitorDump::operator() (const Int256 & x) const { return formatQuotedWithPrefix(x, "Int256_"); } String FieldVisitorDump::operator() (const UUID & x) const { return formatQuotedWithPrefix(x, "UUID_"); } +String FieldVisitorDump::operator() (const bool & x) const { return formatQuotedWithPrefix(x, "Bool_"); } String FieldVisitorDump::operator() (const String & x) const diff --git a/src/Common/FieldVisitorDump.h b/src/Common/FieldVisitorDump.h index 22e34d66ff7..0b1b311999e 100644 --- a/src/Common/FieldVisitorDump.h +++ b/src/Common/FieldVisitorDump.h @@ -27,6 +27,7 @@ public: String operator() (const DecimalField & x) const; String operator() (const DecimalField & x) const; String operator() (const AggregateFunctionStateData & x) const; + String operator() (const bool & x) const; }; } diff --git a/src/Common/FieldVisitorHash.cpp b/src/Common/FieldVisitorHash.cpp index 80d5f2daf65..09b8b7908f3 100644 --- a/src/Common/FieldVisitorHash.cpp +++ b/src/Common/FieldVisitorHash.cpp @@ -146,4 +146,11 @@ void FieldVisitorHash::operator() (const Int256 & x) const hash.update(x); } +void FieldVisitorHash::operator() (const bool & x) const +{ + UInt8 type = Field::Types::Bool; + hash.update(type); + hash.update(x); +} + } diff --git a/src/Common/FieldVisitorHash.h b/src/Common/FieldVisitorHash.h index 6c786fda4ad..7527e13ca20 100644 --- a/src/Common/FieldVisitorHash.h +++ b/src/Common/FieldVisitorHash.h @@ -33,6 +33,7 @@ public: void operator() (const DecimalField & x) const; void operator() (const DecimalField & x) const; void operator() (const AggregateFunctionStateData & x) const; + void operator() (const bool & x) const; }; } diff --git a/src/Common/FieldVisitorSum.cpp b/src/Common/FieldVisitorSum.cpp index 0064830c08a..c3d7f4f8462 100644 --- a/src/Common/FieldVisitorSum.cpp +++ b/src/Common/FieldVisitorSum.cpp @@ -33,5 +33,7 @@ bool FieldVisitorSum::operator() (AggregateFunctionStateData &) const throw Exception("Cannot sum AggregateFunctionStates", ErrorCodes::LOGICAL_ERROR); } +bool FieldVisitorSum::operator() (bool &) const { throw Exception("Cannot sum Bools", ErrorCodes::LOGICAL_ERROR); } + } diff --git a/src/Common/FieldVisitorSum.h b/src/Common/FieldVisitorSum.h index e208933043b..3e868e46f71 100644 --- a/src/Common/FieldVisitorSum.h +++ b/src/Common/FieldVisitorSum.h @@ -27,6 +27,7 @@ public: bool operator() (Map &) const; bool operator() (UUID &) const; bool 
operator() (AggregateFunctionStateData &) const; + bool operator() (bool &) const; template bool operator() (DecimalField & x) const diff --git a/src/Common/FieldVisitorToString.cpp b/src/Common/FieldVisitorToString.cpp index 01b2db54735..6cc83f32a52 100644 --- a/src/Common/FieldVisitorToString.cpp +++ b/src/Common/FieldVisitorToString.cpp @@ -51,7 +51,6 @@ static String formatFloat(const Float64 x) return { buffer, buffer + builder.position() }; } - String FieldVisitorToString::operator() (const Null & x) const { return x.isNegativeInfinity() ? "-Inf" : (x.isPositiveInfinity() ? "+Inf" : "NULL"); } String FieldVisitorToString::operator() (const UInt64 & x) const { return formatQuoted(x); } String FieldVisitorToString::operator() (const Int64 & x) const { return formatQuoted(x); } @@ -67,6 +66,7 @@ String FieldVisitorToString::operator() (const UInt256 & x) const { return forma String FieldVisitorToString::operator() (const Int256 & x) const { return formatQuoted(x); } String FieldVisitorToString::operator() (const UUID & x) const { return formatQuoted(x); } String FieldVisitorToString::operator() (const AggregateFunctionStateData & x) const { return formatQuoted(x.data); } +String FieldVisitorToString::operator() (const bool & x) const { return x ? "true" : "false"; } String FieldVisitorToString::operator() (const Array & x) const { diff --git a/src/Common/FieldVisitorToString.h b/src/Common/FieldVisitorToString.h index 39709f1c272..991f7b4b2d7 100644 --- a/src/Common/FieldVisitorToString.h +++ b/src/Common/FieldVisitorToString.h @@ -27,6 +27,7 @@ public: String operator() (const DecimalField & x) const; String operator() (const DecimalField & x) const; String operator() (const AggregateFunctionStateData & x) const; + String operator() (const bool & x) const; }; } diff --git a/src/Common/FieldVisitorWriteBinary.cpp b/src/Common/FieldVisitorWriteBinary.cpp index d01188bef40..fc17b58b334 100644 --- a/src/Common/FieldVisitorWriteBinary.cpp +++ b/src/Common/FieldVisitorWriteBinary.cpp @@ -66,5 +66,10 @@ void FieldVisitorWriteBinary::operator() (const Map & x, WriteBuffer & buf) cons } } +void FieldVisitorWriteBinary::operator()(const bool & x, WriteBuffer & buf) const +{ + writeBinary(UInt8(x), buf); +} + } diff --git a/src/Common/FieldVisitorWriteBinary.h b/src/Common/FieldVisitorWriteBinary.h index ae864ca74f3..155cf0e1050 100644 --- a/src/Common/FieldVisitorWriteBinary.h +++ b/src/Common/FieldVisitorWriteBinary.h @@ -26,6 +26,7 @@ public: void operator() (const DecimalField & x, WriteBuffer & buf) const; void operator() (const DecimalField & x, WriteBuffer & buf) const; void operator() (const AggregateFunctionStateData & x, WriteBuffer & buf) const; + void operator() (const bool & x, WriteBuffer & buf) const; }; } diff --git a/src/Common/FieldVisitorsAccurateComparison.h b/src/Common/FieldVisitorsAccurateComparison.h index 795620da0cb..487f4f78a00 100644 --- a/src/Common/FieldVisitorsAccurateComparison.h +++ b/src/Common/FieldVisitorsAccurateComparison.h @@ -32,6 +32,14 @@ public: return l == r; return false; } + else if constexpr (std::is_same_v) + { + return operator()(UInt8(l), r); + } + else if constexpr (std::is_same_v) + { + return operator()(l, UInt8(r)); + } else { if constexpr (std::is_same_v) @@ -91,6 +99,14 @@ public: { return r.isPositiveInfinity(); } + else if constexpr (std::is_same_v) + { + return operator()(UInt8(l), r); + } + else if constexpr (std::is_same_v) + { + return operator()(l, UInt8(r)); + } else { if constexpr (std::is_same_v) diff --git 
a/src/Common/IntervalTree.h b/src/Common/IntervalTree.h new file mode 100644 index 00000000000..dc2987247d8 --- /dev/null +++ b/src/Common/IntervalTree.h @@ -0,0 +1,691 @@ +#pragma once + +#include + +#include +#include + + +namespace DB +{ + +/** Structure that holds a closed interval with left and right bounds. + * Interval left must be less than or equal to interval right. + * Example: [1, 1] is a valid interval that contains point 1. + */ +template +struct Interval +{ + using IntervalStorageType = TIntervalStorageType; + IntervalStorageType left; + IntervalStorageType right; + + Interval(IntervalStorageType left_, IntervalStorageType right_) : left(left_), right(right_) { } + + inline bool contains(IntervalStorageType point) const { return left <= point && point <= right; } +}; + +template +bool operator<(const Interval & lhs, const Interval & rhs) +{ + return std::tie(lhs.left, lhs.right) < std::tie(rhs.left, rhs.right); +} + +template +bool operator<=(const Interval & lhs, const Interval & rhs) +{ + return std::tie(lhs.left, lhs.right) <= std::tie(rhs.left, rhs.right); +} + +template +bool operator==(const Interval & lhs, const Interval & rhs) +{ + return std::tie(lhs.left, lhs.right) == std::tie(rhs.left, rhs.right); +} + +template +bool operator!=(const Interval & lhs, const Interval & rhs) +{ + return std::tie(lhs.left, lhs.right) != std::tie(rhs.left, rhs.right); +} + +template +bool operator>(const Interval & lhs, const Interval & rhs) +{ + return std::tie(lhs.left, lhs.right) > std::tie(rhs.left, rhs.right); +} + +template +bool operator>=(const Interval & lhs, const Interval & rhs) +{ + return std::tie(lhs.left, lhs.right) >= std::tie(rhs.left, rhs.right); +} + +struct IntervalTreeVoidValue +{ +}; + +/** Tree structure that allows to efficiently retrieve all intervals that intersect a specific point. + * https://en.wikipedia.org/wiki/Interval_tree + * + * Searching for all intervals intersecting a point has complexity O(log(n) + k), where k is the number of intervals that intersect the point. + * If we only need to check whether some interval intersects the point, the operation has complexity O(log(n)). + * + * The invariant is that interval left must be less than or equal to interval right, otherwise such an interval could not contain any point. + * If that invariant is broken, inserting such an interval into IntervalTree will return false. + * + * Explanation: + * + * The IntervalTree structure is a balanced tree. Each node contains: + * 1. A point. + * 2. Intervals that intersect that point, sorted by left ascending. + * 3. Intervals that intersect that point, sorted by right descending. + * + * Build: + * + * To keep the tree relatively balanced we use the median of all segment points. + * On each step we build a tree node from a set of intervals. For the root node the input set is all intervals. + * First split the intervals into 4 groups: + * 1. Intervals that lie entirely to the left of the median point (interval right is less than the median point). + * 2. Intervals that lie entirely to the right of the median point (interval left is greater than the median point). + * 3. Intervals that intersect the median point, sorted by left ascending. + * 4. Intervals that intersect the median point, sorted by right descending. + * + * If group 1 is not empty, continue building the left child recursively with the intervals from group 1. + * If group 2 is not empty, continue building the right child recursively with the intervals from group 2. + * + * Search: + * + * Search for intervals intersecting a point starts from the root node.
+ * If search point is less than point in node, then we check intervals sorted by left ascending + * until left is greater than search point. + * If there is left child, continue search recursively in left child. + * + * If search point is greater than point in node, then we check intervals sorted by right descending + * until right is less than search point. + * If there is right child, continue search recursively in right child. + * + * If search point is equal to point in node, then we can emit all intervals that intersect current tree node + * and stop searching. + * + * Additional details: + * 1. To improve cache locality tree is stored implicitly in array, after build method is called + * other intervals cannot be added to the tree. + * 2. Additionally to improve cache locality in tree node we store sorted intervals for all nodes in separate + * array. In node we store only start of its sorted intervals, and also size of intersecting intervals. + * If we need to retrieve intervals sorted by left ascending they will be stored in indexes + * [sorted_intervals_start_index, sorted_intervals_start_index + intersecting_intervals_size). + * If we need to retrieve intervals sorted by right descending they will be store in indexes + * [sorted_intervals_start_index + intersecting_intervals_size, sorted_intervals_start_index + intersecting_intervals_size * 2). + */ +template +class IntervalTree +{ +public: + using IntervalStorageType = typename Interval::IntervalStorageType; + + static constexpr bool is_empty_value = std::is_same_v; + + IntervalTree() { nodes.resize(1); } + + template , bool> = true> + ALWAYS_INLINE bool emplace(Interval interval) + { + assert(!tree_is_built); + if (unlikely(interval.left > interval.right)) + return false; + + sorted_intervals.emplace_back(interval); + increaseIntervalsSize(); + + return true; + } + + template , bool> = true, typename... Args> + ALWAYS_INLINE bool emplace(Interval interval, Args &&... args) + { + assert(!tree_is_built); + if (unlikely(interval.left > interval.right)) + return false; + + sorted_intervals.emplace_back( + std::piecewise_construct, std::forward_as_tuple(interval), std::forward_as_tuple(std::forward(args)...)); + increaseIntervalsSize(); + + return true; + } + + template , bool> = true> + bool insert(Interval interval) + { + return emplace(interval); + } + + template , bool> = true> + bool insert(Interval interval, const Value & value) + { + return emplace(interval, value); + } + + template , bool> = true> + bool insert(Interval interval, Value && value) + { + return emplace(interval, std::move(value)); + } + + /// Build tree, after that intervals cannot be inserted, and only search or iteration can be performed. + void build() + { + assert(!tree_is_built); + nodes.clear(); + nodes.reserve(sorted_intervals.size()); + buildTree(); + tree_is_built = true; + } + + /** Find all intervals intersecting point. 
+ * + * Callback interface for IntervalSet: + * + * template + * struct IntervalSetCallback + * { + * bool operator()(const IntervalType & interval) + * { + * bool should_continue_interval_iteration = false; + * return should_continue_interval_iteration; + * } + * }; + * + * Callback interface for IntervalMap: + * + * template + * struct IntervalMapCallback + * { + * bool operator()(const IntervalType & interval, const Value & value) + * { + * bool should_continue_interval_iteration = false; + * return should_continue_interval_iteration; + * } + * }; + */ + + template + void find(IntervalStorageType point, IntervalCallback && callback) const + { + if (unlikely(!tree_is_built)) + { + findIntervalsNonConstructedImpl(point, callback); + return; + } + + findIntervalsImpl(point, callback); + } + + /// Check if there is an interval intersecting point + bool has(IntervalStorageType point) const + { + bool has_intervals = false; + + if constexpr (is_empty_value) + { + find(point, [&](auto &) + { + has_intervals = true; + return false; + }); + } + else + { + find(point, [&](auto &, auto &) + { + has_intervals = true; + return false; + }); + } + + return has_intervals; + } + + class Iterator; + using iterator = Iterator; + using const_iterator = Iterator; + + iterator begin() + { + size_t start_index = findFirstIteratorNodeIndex(); + return Iterator(start_index, 0, this); + } + + iterator end() + { + size_t end_index = findLastIteratorNodeIndex(); + size_t last_interval_index = 0; + + if (likely(end_index < nodes.size())) + last_interval_index = nodes[end_index].sorted_intervals_range_size; + + return Iterator(end_index, last_interval_index, this); + } + + const_iterator begin() const + { + size_t start_index = findFirstIteratorNodeIndex(); + return Iterator(start_index, 0, this); + } + + const_iterator end() const + { + size_t end_index = findLastIteratorNodeIndex(); + size_t last_interval_index = 0; + + if (likely(end_index < nodes.size())) + last_interval_index = nodes[end_index].sorted_intervals_range_size; + + return Iterator(end_index, last_interval_index, this); + } + + const_iterator cbegin() const { return begin(); } + + const_iterator cend() const { return end(); } + + size_t getIntervalsSize() const { return intervals_size; } + +private: + struct Node + { + size_t sorted_intervals_range_start_index; + size_t sorted_intervals_range_size; + + IntervalStorageType middle_element; + + inline bool hasValue() const { return sorted_intervals_range_size != 0; } + }; + + using IntervalWithEmptyValue = Interval; + using IntervalWithNonEmptyValue = std::pair; + + using IntervalWithValue = std::conditional_t; + +public: + class Iterator + { + public: + bool operator==(const Iterator & rhs) const + { + return node_index == rhs.node_index && current_interval_index == rhs.current_interval_index && tree == rhs.tree; + } + + bool operator!=(const Iterator & rhs) const { return !(*this == rhs); } + + const IntervalWithValue & operator*() { return getCurrentValue(); } + + const IntervalWithValue & operator*() const { return getCurrentValue(); } + + const IntervalWithValue * operator->() { return &getCurrentValue(); } + + const IntervalWithValue * operator->() const { return &getCurrentValue(); } + + Iterator & operator++() + { + iterateToNext(); + return *this; + } + + Iterator operator++(int) // NOLINT + { + Iterator copy(*this); + iterateToNext(); + return copy; + } + + Iterator & operator--() + { + iterateToPrevious(); + return *this; + } + + Iterator operator--(int) // NOLINT + { + Iterator copy(*this); 
+ iterateToPrevious(); + return copy; + } + + private: + friend class IntervalTree; + + Iterator(size_t node_index_, size_t current_interval_index_, const IntervalTree * tree_) + : node_index(node_index_), current_interval_index(current_interval_index_), tree(tree_) + { + } + + size_t node_index; + size_t current_interval_index; + const IntervalTree * tree; + + void iterateToNext() + { + size_t nodes_size = tree->nodes.size(); + auto & current_node = tree->nodes[node_index]; + + ++current_interval_index; + + if (current_interval_index < current_node.sorted_intervals_range_size) + return; + + size_t node_index_copy = node_index + 1; + for (; node_index_copy < nodes_size; ++node_index_copy) + { + auto & node = tree->nodes[node_index_copy]; + + if (node.hasValue()) + { + node_index = node_index_copy; + current_interval_index = 0; + break; + } + } + } + + void iterateToPrevious() + { + if (current_interval_index > 0) + { + --current_interval_index; + return; + } + + while (node_index > 0) + { + auto & node = tree->nodes[node_index - 1]; + if (node.hasValue()) + { + current_interval_index = node.sorted_intervals_range_size - 1; + break; + } + + --node_index; + } + } + + const IntervalWithValue & getCurrentValue() const + { + auto & current_node = tree->nodes[node_index]; + size_t interval_index = current_node.sorted_intervals_range_start_index + current_interval_index; + return tree->sorted_intervals[interval_index]; + } + }; + +private: + void buildTree() + { + std::vector temporary_points_storage; + temporary_points_storage.reserve(sorted_intervals.size() * 2); + + std::vector left_intervals; + std::vector right_intervals; + std::vector intervals_sorted_by_left_asc; + std::vector intervals_sorted_by_right_desc; + + struct StackFrame + { + size_t index; + std::vector intervals; + }; + + std::vector stack; + stack.emplace_back(StackFrame{0, std::move(sorted_intervals)}); + sorted_intervals.clear(); + + while (!stack.empty()) + { + auto frame = std::move(stack.back()); + stack.pop_back(); + + size_t current_index = frame.index; + auto & current_intervals = frame.intervals; + + if (current_intervals.empty()) + continue; + + if (current_index >= nodes.size()) + nodes.resize(current_index + 1); + + temporary_points_storage.clear(); + intervalsToPoints(current_intervals, temporary_points_storage); + auto median = pointsMedian(temporary_points_storage); + + left_intervals.clear(); + right_intervals.clear(); + intervals_sorted_by_left_asc.clear(); + intervals_sorted_by_right_desc.clear(); + + for (const auto & interval_with_value : current_intervals) + { + auto & interval = getInterval(interval_with_value); + + if (interval.right < median) + { + left_intervals.emplace_back(interval_with_value); + } + else if (interval.left > median) + { + right_intervals.emplace_back(interval_with_value); + } + else + { + intervals_sorted_by_left_asc.emplace_back(interval_with_value); + intervals_sorted_by_right_desc.emplace_back(interval_with_value); + } + } + + std::sort(intervals_sorted_by_left_asc.begin(), intervals_sorted_by_left_asc.end(), [](auto & lhs, auto & rhs) + { + auto & lhs_interval = getInterval(lhs); + auto & rhs_interval = getInterval(rhs); + return lhs_interval.left < rhs_interval.left; + }); + + std::sort(intervals_sorted_by_right_desc.begin(), intervals_sorted_by_right_desc.end(), [](auto & lhs, auto & rhs) + { + auto & lhs_interval = getInterval(lhs); + auto & rhs_interval = getInterval(rhs); + return lhs_interval.right > rhs_interval.right; + }); + + size_t sorted_intervals_range_start_index = 
sorted_intervals.size(); + + for (auto && interval_sorted_by_left_asc : intervals_sorted_by_left_asc) + sorted_intervals.emplace_back(std::move(interval_sorted_by_left_asc)); + + for (auto && interval_sorted_by_right_desc : intervals_sorted_by_right_desc) + sorted_intervals.emplace_back(std::move(interval_sorted_by_right_desc)); + + auto & node = nodes[current_index]; + node.middle_element = median; + node.sorted_intervals_range_start_index = sorted_intervals_range_start_index; + node.sorted_intervals_range_size = intervals_sorted_by_left_asc.size(); + + size_t left_child_index = current_index * 2 + 1; + stack.emplace_back(StackFrame{left_child_index, std::move(left_intervals)}); + + size_t right_child_index = current_index * 2 + 2; + stack.emplace_back(StackFrame{right_child_index, std::move(right_intervals)}); + } + } + + template + void findIntervalsImpl(IntervalStorageType point, IntervalCallback && callback) const + { + size_t current_index = 0; + + while (true) + { + if (current_index >= nodes.size()) + break; + + auto & node = nodes[current_index]; + if (!node.hasValue()) + break; + + auto middle_element = node.middle_element; + + if (point < middle_element) + { + size_t start = node.sorted_intervals_range_start_index; + size_t end = start + node.sorted_intervals_range_size; + + for (; start != end; ++start) + { + auto & interval_with_value_left_sorted_asc = sorted_intervals[start]; + auto & interval_left_sorted_asc = getInterval(interval_with_value_left_sorted_asc); + if (interval_left_sorted_asc.left > point) + break; + + bool should_continue = callCallback(interval_with_value_left_sorted_asc, callback); + if (unlikely(!should_continue)) + return; + } + + size_t left_child_index = current_index * 2 + 1; + current_index = left_child_index; + } + else + { + size_t start = node.sorted_intervals_range_start_index + node.sorted_intervals_range_size; + size_t end = start + node.sorted_intervals_range_size; + + for (; start != end; ++start) + { + auto & interval_with_value_right_sorted_desc = sorted_intervals[start]; + auto & interval_right_sorted_desc = getInterval(interval_with_value_right_sorted_desc); + if (interval_right_sorted_desc.right < point) + break; + + bool should_continue = callCallback(interval_with_value_right_sorted_desc, callback); + if (unlikely(!should_continue)) + return; + } + + if (likely(point > middle_element)) + { + size_t right_child_index = current_index * 2 + 2; + current_index = right_child_index; + } + else + { + /// This is case when point == middle_element. 
+ break; + } + } + } + } + + template + void findIntervalsNonConstructedImpl(IntervalStorageType point, IntervalCallback && callback) const + { + for (auto & interval_with_value : sorted_intervals) + { + auto & interval = getInterval(interval_with_value); + + if (interval.contains(point)) + callCallback(interval_with_value, callback); + } + } + + inline size_t findFirstIteratorNodeIndex() const + { + size_t nodes_size = nodes.size(); + size_t result_index = 0; + + for (; result_index < nodes_size; ++result_index) + { + if (nodes[result_index].hasValue()) + break; + } + + if (unlikely(result_index == nodes_size)) + result_index = 0; + + return result_index; + } + + inline size_t findLastIteratorNodeIndex() const + { + if (unlikely(nodes.empty())) + return 0; + + size_t nodes_size = nodes.size(); + size_t result_index = nodes_size - 1; + for (; result_index != 0; --result_index) + { + if (nodes[result_index].hasValue()) + break; + } + + return result_index; + } + + inline void increaseIntervalsSize() + { + /// Before tree is build we store all intervals size in our first node to allow tree iteration. + ++intervals_size; + nodes[0].sorted_intervals_range_size = intervals_size; + } + + std::vector nodes; + std::vector sorted_intervals; + size_t intervals_size = 0; + bool tree_is_built = false; + + static inline const Interval & getInterval(const IntervalWithValue & interval_with_value) + { + if constexpr (is_empty_value) + return interval_with_value; + else + return interval_with_value.first; + } + + template + static inline bool callCallback(const IntervalWithValue & interval, IntervalCallback && callback) + { + if constexpr (is_empty_value) + return callback(interval); + else + return callback(interval.first, interval.second); + } + + static inline void + intervalsToPoints(const std::vector & intervals, std::vector & temporary_points_storage) + { + for (const auto & interval_with_value : intervals) + { + auto & interval = getInterval(interval_with_value); + temporary_points_storage.emplace_back(interval.left); + temporary_points_storage.emplace_back(interval.right); + } + } + + static inline IntervalStorageType pointsMedian(std::vector & points) + { + size_t size = points.size(); + size_t middle_element_index = size / 2; + + std::nth_element(points.begin(), points.begin() + middle_element_index, points.end()); + + /** We should not get median as average of middle_element_index and middle_element_index - 1 + * because we want point in node to intersect some interval. + * Example: Intervals [1, 1], [3, 3]. If we choose 2 as average point, it does not intersect any interval. + */ + return points[middle_element_index]; + } +}; + +template +using IntervalSet = IntervalTree; + +template +using IntervalMap = IntervalTree; + +} diff --git a/src/Common/OptimizedRegularExpression.h b/src/Common/OptimizedRegularExpression.h index f7223c2efa9..09b0dbe5337 100644 --- a/src/Common/OptimizedRegularExpression.h +++ b/src/Common/OptimizedRegularExpression.h @@ -8,12 +8,7 @@ #include #include - -#if USE_RE2_ST -# include -#else -# define re2_st re2 -#endif +#include /** Uses two ways to optimize a regular expression: diff --git a/src/Common/ZooKeeper/CMakeLists.txt b/src/Common/ZooKeeper/CMakeLists.txt index 7e0558dd575..8a705eb46e6 100644 --- a/src/Common/ZooKeeper/CMakeLists.txt +++ b/src/Common/ZooKeeper/CMakeLists.txt @@ -3,7 +3,12 @@ include("${ClickHouse_SOURCE_DIR}/cmake/dbms_glob_sources.cmake") add_headers_and_sources(clickhouse_common_zookeeper .) 
# for clickhouse server -add_library(clickhouse_common_zookeeper ${clickhouse_common_zookeeper_headers} ${clickhouse_common_zookeeper_sources}) +# +# NOTE: this library depends from Interpreters (DB::SystemLog::add), +# and so it should be STATIC because otherwise: +# - it will either fail to compile with -Wl,--unresolved-symbols=report-all +# - or it will report errors at runtime +add_library(clickhouse_common_zookeeper STATIC ${clickhouse_common_zookeeper_headers} ${clickhouse_common_zookeeper_sources}) target_compile_definitions (clickhouse_common_zookeeper PRIVATE -DZOOKEEPER_LOG) target_link_libraries (clickhouse_common_zookeeper PUBLIC @@ -12,12 +17,6 @@ target_link_libraries (clickhouse_common_zookeeper PRIVATE string_utils ) -# To avoid circular dependency from interpreters. -if (OS_DARWIN) - target_link_libraries (clickhouse_common_zookeeper PRIVATE -Wl,-undefined,dynamic_lookup) -else() - target_link_libraries (clickhouse_common_zookeeper PRIVATE -Wl,--unresolved-symbols=ignore-all) -endif() # for examples -- no logging (to avoid extra dependencies) add_library(clickhouse_common_zookeeper_no_log ${clickhouse_common_zookeeper_headers} ${clickhouse_common_zookeeper_sources}) diff --git a/src/Common/config.h.in b/src/Common/config.h.in index 28506a94581..3d785e0d0fb 100644 --- a/src/Common/config.h.in +++ b/src/Common/config.h.in @@ -2,17 +2,14 @@ // .h autogenerated by cmake! +#cmakedefine01 USE_CPUID #cmakedefine01 USE_BASE64 -#cmakedefine01 USE_RE2_ST #cmakedefine01 USE_SSL -#cmakedefine01 USE_INTERNAL_SSL_LIBRARY #cmakedefine01 USE_HDFS -#cmakedefine01 USE_INTERNAL_HDFS3_LIBRARY #cmakedefine01 USE_AWS_S3 #cmakedefine01 USE_AZURE_BLOB_STORAGE #cmakedefine01 USE_BROTLI #cmakedefine01 USE_UNWIND -#cmakedefine01 USE_OPENCL #cmakedefine01 USE_CASSANDRA #cmakedefine01 USE_SENTRY #cmakedefine01 USE_GRPC @@ -22,3 +19,6 @@ #cmakedefine01 USE_BZIP2 #cmakedefine01 USE_SNAPPY #cmakedefine01 USE_HIVE +#cmakedefine01 USE_ODBC +#cmakedefine01 USE_REPLXX +#cmakedefine01 USE_JEMALLOC diff --git a/src/Common/examples/CMakeLists.txt b/src/Common/examples/CMakeLists.txt index be91101ef40..9e551f3aa54 100644 --- a/src/Common/examples/CMakeLists.txt +++ b/src/Common/examples/CMakeLists.txt @@ -1,7 +1,7 @@ add_executable (hashes_test hashes_test.cpp) -target_link_libraries (hashes_test PRIVATE clickhouse_common_io ${CITYHASH_LIBRARIES}) -if(OPENSSL_CRYPTO_LIBRARY) - target_link_libraries (hashes_test PRIVATE ${OPENSSL_CRYPTO_LIBRARY}) +target_link_libraries (hashes_test PRIVATE clickhouse_common_io ch_contrib::cityhash) +if (TARGET OpenSSL::Crypto) + target_link_libraries (hashes_test PRIVATE OpenSSL::Crypto) endif() add_executable (sip_hash_perf sip_hash_perf.cpp) @@ -23,8 +23,7 @@ add_executable (compact_array compact_array.cpp) target_link_libraries (compact_array PRIVATE clickhouse_common_io) add_executable (radix_sort radix_sort.cpp) -target_link_libraries (radix_sort PRIVATE clickhouse_common_io) -target_include_directories(radix_sort SYSTEM PRIVATE ${PDQSORT_INCLUDE_DIR}) +target_link_libraries (radix_sort PRIVATE clickhouse_common_io ch_contrib::pdqsort) add_executable (arena_with_free_lists arena_with_free_lists.cpp) target_link_libraries (arena_with_free_lists PRIVATE dbms) @@ -42,12 +41,10 @@ add_executable (space_saving space_saving.cpp) target_link_libraries (space_saving PRIVATE clickhouse_common_io) add_executable (integer_hash_tables_and_hashes integer_hash_tables_and_hashes.cpp) -target_include_directories (integer_hash_tables_and_hashes SYSTEM BEFORE PRIVATE 
${SPARSEHASH_INCLUDE_DIR}) -target_link_libraries (integer_hash_tables_and_hashes PRIVATE dbms abseil_swiss_tables) +target_link_libraries (integer_hash_tables_and_hashes PRIVATE dbms ch_contrib::abseil_swiss_tables ch_contrib::sparsehash) add_executable (integer_hash_tables_benchmark integer_hash_tables_benchmark.cpp) -target_include_directories (integer_hash_tables_benchmark SYSTEM BEFORE PRIVATE ${SPARSEHASH_INCLUDE_DIR}) -target_link_libraries (integer_hash_tables_benchmark PRIVATE dbms abseil_swiss_tables) +target_link_libraries (integer_hash_tables_benchmark PRIVATE dbms ch_contrib::abseil_swiss_tables ch_contrib::sparsehash) add_executable (cow_columns cow_columns.cpp) target_link_libraries (cow_columns PRIVATE clickhouse_common_io) @@ -81,5 +78,10 @@ target_link_libraries (shell_command_inout PRIVATE clickhouse_common_io) add_executable (executable_udf executable_udf.cpp) target_link_libraries (executable_udf PRIVATE dbms) -add_executable(hive_metastore_client hive_metastore_client.cpp) -target_link_libraries (hive_metastore_client PUBLIC hivemetastore ${THRIFT_LIBRARY}) +if (ENABLE_HIVE) + add_executable (hive_metastore_client hive_metastore_client.cpp) + target_link_libraries (hive_metastore_client PUBLIC ch_contrib::hivemetastore ch_contrib::thrift) +endif() + +add_executable (interval_tree interval_tree.cpp) +target_link_libraries (interval_tree PRIVATE dbms) diff --git a/src/Common/examples/interval_tree.cpp b/src/Common/examples/interval_tree.cpp new file mode 100644 index 00000000000..086fab37bbe --- /dev/null +++ b/src/Common/examples/interval_tree.cpp @@ -0,0 +1,95 @@ +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +using namespace DB; +using Int64Interval = Interval; + +int main(int, char **) +{ + { + IntervalSet tree; + + tree.emplace(Int64Interval(0, 5)); + tree.emplace(Int64Interval(10, 15)); + + tree.build(); + + for (const auto & interval : tree) + { + std::cout << "Interval left " << interval.left << " right " << interval.right << std::endl; + } + } + { + IntervalMap tree; + + tree.emplace(Int64Interval(0, 5), "value1"); + tree.emplace(Int64Interval(10, 15), "value2"); + + tree.build(); + + for (const auto & [interval, value] : tree) + { + std::cout << "Interval left " << interval.left << " right " << interval.right; + std::cout << " value " << value << std::endl; + } + } + { + IntervalSet tree; + for (size_t i = 0; i < 5; ++i) + { + tree.emplace(Int64Interval(0, i)); + } + + tree.build(); + + for (const auto & interval : tree) + { + std::cout << "Interval left " << interval.left << " right " << interval.right << std::endl; + } + + for (Int64 i = 0; i < 5; ++i) + { + tree.find(i, [](auto & interval) + { + std::cout << "Interval left " << interval.left << " right " << interval.right << std::endl; + return true; + }); + } + } + { + IntervalMap tree; + for (size_t i = 0; i < 5; ++i) + { + tree.emplace(Int64Interval(0, i), "Value " + std::to_string(i)); + } + + tree.build(); + + for (const auto & [interval, value] : tree) + { + std::cout << "Interval left " << interval.left << " right " << interval.right; + std::cout << " value " << value << std::endl; + } + + for (Int64 i = 0; i < 5; ++i) + { + tree.find(i, [](auto & interval, auto & value) + { + std::cout << "Interval left " << interval.left << " right " << interval.right; + std::cout << " value " << value << std::endl; + + return true; + }); + } + } + + return 0; +} diff --git a/src/Common/getNumberOfPhysicalCPUCores.cpp b/src/Common/getNumberOfPhysicalCPUCores.cpp index 
13485c634e8..2fc1dbf9669 100644 --- a/src/Common/getNumberOfPhysicalCPUCores.cpp +++ b/src/Common/getNumberOfPhysicalCPUCores.cpp @@ -1,26 +1,80 @@ #include "getNumberOfPhysicalCPUCores.h" +#include +#if defined(OS_LINUX) +# include +# include +#endif #if USE_CPUID # include #endif #include +#if defined(OS_LINUX) +unsigned getCGroupLimitedCPUCores(unsigned default_cpu_count) +{ + // Try to look at cgroups limit if it is available. + auto read_from = [](const char * filename, int default_value) -> int { + std::ifstream infile(filename); + if (!infile.is_open()) + { + return default_value; + } + int idata; + if (infile >> idata) + return idata; + else + return default_value; + }; + + unsigned quota_count = default_cpu_count; + // Return the number of milliseconds per period process is guaranteed to run. + // -1 for no quota + int cgroup_quota = read_from("/sys/fs/cgroup/cpu/cpu.cfs_quota_us", -1); + int cgroup_period = read_from("/sys/fs/cgroup/cpu/cpu.cfs_period_us", -1); + if (cgroup_quota > -1 && cgroup_period > 0) + { + quota_count = ceil(static_cast(cgroup_quota) / static_cast(cgroup_period)); + } + + // Share number (typically a number relative to 1024) (2048 typically expresses 2 CPUs worth of processing) + // -1 for no share setup + int cgroup_share = read_from("/sys/fs/cgroup/cpu/cpu.shares", -1); + // Convert 1024 to no shares setup + if (cgroup_share == 1024) + cgroup_share = -1; + +# define PER_CPU_SHARES 1024 + unsigned share_count = default_cpu_count; + if (cgroup_share > -1) + { + share_count = ceil(static_cast(cgroup_share) / static_cast(PER_CPU_SHARES)); + } + + return std::min(default_cpu_count, std::min(share_count, quota_count)); +} +#endif // OS_LINUX unsigned getNumberOfPhysicalCPUCores() { - static const unsigned number = [] - { -# if USE_CPUID + static const unsigned number = [] { + unsigned cpu_count = 0; // start with an invalid num +#if USE_CPUID + do + { cpu_raw_data_t raw_data; cpu_id_t data; /// On Xen VMs, libcpuid returns wrong info (zero number of cores). Fallback to alternative method. /// Also, libcpuid does not support some CPUs like AMD Hygon C86 7151. if (0 != cpuid_get_raw_data(&raw_data) || 0 != cpu_identify(&raw_data, &data) || data.num_logical_cpus == 0) - return std::thread::hardware_concurrency(); + { + // Just fallback + break; + } - unsigned res = data.num_cores * data.total_logical_cpus / data.num_logical_cpus; + cpu_count = data.num_cores * data.total_logical_cpus / data.num_logical_cpus; /// Also, libcpuid gives strange result on Google Compute Engine VMs. /// Example: @@ -28,14 +82,18 @@ unsigned getNumberOfPhysicalCPUCores() /// total_logical_cpus = 1, /// total number of logical cores on all sockets /// num_logical_cpus = 24. /// number of logical cores on current CPU socket /// It means two-way hyper-threading (24 / 12), but contradictory, 'total_logical_cpus' == 1. - - if (res != 0) - return res; -# endif + } while (false); +#endif /// As a fallback (also for non-x86 architectures) assume there are no hyper-threading on the system. /// (Actually, only Aarch64 is supported). 
- return std::thread::hardware_concurrency(); + if (cpu_count == 0) + cpu_count = std::thread::hardware_concurrency(); + +#if defined(OS_LINUX) + cpu_count = getCGroupLimitedCPUCores(cpu_count); +#endif // OS_LINUX + return cpu_count; }(); return number; } diff --git a/src/Common/memcmpSmall.h b/src/Common/memcmpSmall.h index db8641cb44d..57b9c731897 100644 --- a/src/Common/memcmpSmall.h +++ b/src/Common/memcmpSmall.h @@ -25,8 +25,240 @@ inline int cmp(T a, T b) /// We can process uninitialized memory in the functions below. /// Results don't depend on the values inside uninitialized memory but Memory Sanitizer cannot see it. /// Disable optimized functions if compile with Memory Sanitizer. +#if defined(__AVX512BW__) && defined(__AVX512VL__) && !defined(MEMORY_SANITIZER) +#include -#if defined(__SSE2__) && !defined(MEMORY_SANITIZER) + +/** All functions works under the following assumptions: + * - it's possible to read up to 15 excessive bytes after end of 'a' and 'b' region; + * - memory regions are relatively small and extra loop unrolling is not worth to do. + */ + +/** Variant when memory regions may have different sizes. + */ +template +inline int memcmpSmallAllowOverflow15(const Char * a, size_t a_size, const Char * b, size_t b_size) +{ + size_t min_size = std::min(a_size, b_size); + + for (size_t offset = 0; offset < min_size; offset += 16) + { + uint16_t mask = _mm_cmp_epi8_mask( + _mm_loadu_si128(reinterpret_cast(a + offset)), + _mm_loadu_si128(reinterpret_cast(b + offset)), _MM_CMPINT_NE); + + if (mask) + { + offset += __builtin_ctz(mask); + + if (offset >= min_size) + break; + + return detail::cmp(a[offset], b[offset]); + } + } + + return detail::cmp(a_size, b_size); +} + + +/** Variant when memory regions may have different sizes. + * But compare the regions as the smaller one is padded with zero bytes up to the size of the larger. + * It's needed to hold that: toFixedString('abc', 5) = 'abc' + * for compatibility with SQL standard. + */ +template +inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_size, const Char * b, size_t b_size) +{ + size_t min_size = std::min(a_size, b_size); + + for (size_t offset = 0; offset < min_size; offset += 16) + { + uint16_t mask = _mm_cmp_epi8_mask( + _mm_loadu_si128(reinterpret_cast(a + offset)), + _mm_loadu_si128(reinterpret_cast(b + offset)), _MM_CMPINT_NE); + + if (mask) + { + offset += __builtin_ctz(mask); + + if (offset >= min_size) + break; + + return detail::cmp(a[offset], b[offset]); + } + } + + /// The strings are equal up to min_size. + /// If the rest of the larger string is zero bytes then the strings are considered equal. + + size_t max_size; + const Char * longest; + int cmp; + + if (a_size == b_size) + { + return 0; + } + else if (a_size > b_size) + { + max_size = a_size; + longest = a; + cmp = 1; + } + else + { + max_size = b_size; + longest = b; + cmp = -1; + } + + const __m128i zero16 = _mm_setzero_si128(); + + for (size_t offset = min_size; offset < max_size; offset += 16) + { + uint16_t mask = _mm_cmpneq_epi8_mask( + _mm_loadu_si128(reinterpret_cast(longest + offset)), + zero16); + + if (mask) + { + offset += __builtin_ctz(mask); + + if (offset >= max_size) + return 0; + return cmp; + } + } + + return 0; +} + + +/** Variant when memory regions have same size. + * TODO Check if the compiler can optimize previous function when the caller pass identical sizes. 
+ */ +template +inline int memcmpSmallAllowOverflow15(const Char * a, const Char * b, size_t size) +{ + for (size_t offset = 0; offset < size; offset += 16) + { + uint16_t mask = _mm_cmp_epi8_mask( + _mm_loadu_si128(reinterpret_cast(a + offset)), + _mm_loadu_si128(reinterpret_cast(b + offset)), _MM_CMPINT_NE); + + if (mask) + { + offset += __builtin_ctz(mask); + + if (offset >= size) + return 0; + + return detail::cmp(a[offset], b[offset]); + } + } + + return 0; +} + + +/** Compare memory regions for equality. + */ +template +inline bool memequalSmallAllowOverflow15(const Char * a, size_t a_size, const Char * b, size_t b_size) +{ + if (a_size != b_size) + return false; + + for (size_t offset = 0; offset < a_size; offset += 16) + { + uint16_t mask = _mm_cmp_epi8_mask( + _mm_loadu_si128(reinterpret_cast(a + offset)), + _mm_loadu_si128(reinterpret_cast(b + offset)), _MM_CMPINT_NE); + + if (mask) + { + offset += __builtin_ctz(mask); + return offset >= a_size; + } + } + + return true; +} + + +/** Variant when the caller know in advance that the size is a multiple of 16. + */ +template +inline int memcmpSmallMultipleOf16(const Char * a, const Char * b, size_t size) +{ + for (size_t offset = 0; offset < size; offset += 16) + { + uint16_t mask = _mm_cmp_epi8_mask( + _mm_loadu_si128(reinterpret_cast(a + offset)), + _mm_loadu_si128(reinterpret_cast(b + offset)), _MM_CMPINT_NE); + + if (mask) + { + offset += __builtin_ctz(mask); + return detail::cmp(a[offset], b[offset]); + } + } + + return 0; +} + + +/** Variant when the size is 16 exactly. + */ +template +inline int memcmp16(const Char * a, const Char * b) +{ + uint16_t mask = _mm_cmp_epi8_mask( + _mm_loadu_si128(reinterpret_cast(a)), + _mm_loadu_si128(reinterpret_cast(b)), _MM_CMPINT_NE); + + if (mask) + { + auto offset = __builtin_ctz(mask); + return detail::cmp(a[offset], b[offset]); + } + + return 0; +} + + +/** Variant when the size is 16 exactly. + */ +inline bool memequal16(const void * a, const void * b) +{ + return 0xFFFF == _mm_cmp_epi8_mask( + _mm_loadu_si128(reinterpret_cast(a)), + _mm_loadu_si128(reinterpret_cast(b)), _MM_CMPINT_EQ); +} + + +/** Compare memory region to zero */ +inline bool memoryIsZeroSmallAllowOverflow15(const void * data, size_t size) +{ + const __m128i zero16 = _mm_setzero_si128(); + + for (size_t offset = 0; offset < size; offset += 16) + { + uint16_t mask = _mm_cmp_epi8_mask(zero16, + _mm_loadu_si128(reinterpret_cast(reinterpret_cast(data) + offset)), _MM_CMPINT_NE); + + if (mask) + { + offset += __builtin_ctz(mask); + return offset >= size; + } + } + + return true; +} + +#elif defined(__SSE2__) && !defined(MEMORY_SANITIZER) #include diff --git a/src/Common/memory.h b/src/Common/memory.h index 0dc163a54a1..41b10a57db4 100644 --- a/src/Common/memory.h +++ b/src/Common/memory.h @@ -4,12 +4,13 @@ #include #include +#include #if USE_JEMALLOC # include #endif -#if !USE_JEMALLOC || JEMALLOC_VERSION_MAJOR < 4 +#if !USE_JEMALLOC # include #endif @@ -37,7 +38,7 @@ inline ALWAYS_INLINE void deleteImpl(void * ptr) noexcept free(ptr); } -#if USE_JEMALLOC && JEMALLOC_VERSION_MAJOR >= 4 +#if USE_JEMALLOC inline ALWAYS_INLINE void deleteSized(void * ptr, std::size_t size) noexcept { @@ -67,7 +68,7 @@ inline ALWAYS_INLINE size_t getActualAllocationSize(size_t size) { size_t actual_size = size; -#if USE_JEMALLOC && JEMALLOC_VERSION_MAJOR >= 5 +#if USE_JEMALLOC /// The nallocx() function allocates no memory, but it performs the same size computation as the mallocx() function /// @note je_mallocx() != je_malloc(). 
It's expected they don't differ much in allocation logic. if (likely(size != 0)) @@ -87,7 +88,7 @@ inline ALWAYS_INLINE void untrackMemory(void * ptr [[maybe_unused]], std::size_t { try { -#if USE_JEMALLOC && JEMALLOC_VERSION_MAJOR >= 5 +#if USE_JEMALLOC /// @note It's also possible to use je_malloc_usable_size() here. if (likely(ptr != nullptr)) CurrentMemoryTracker::free(sallocx(ptr, 0)); diff --git a/src/Common/mysqlxx/CMakeLists.txt b/src/Common/mysqlxx/CMakeLists.txt index 76005651e61..d7292075aae 100644 --- a/src/Common/mysqlxx/CMakeLists.txt +++ b/src/Common/mysqlxx/CMakeLists.txt @@ -15,10 +15,12 @@ target_include_directories (mysqlxx PUBLIC .) target_link_libraries (mysqlxx clickhouse_common_io - ${MYSQLCLIENT_LIBRARIES} - ${ZLIB_LIBRARIES} + ch_contrib::zlib + ch_contrib::mariadbclient ) +add_library(ch::mysqlxx ALIAS mysqlxx) + if (ENABLE_TESTS) add_subdirectory (tests) endif () diff --git a/src/Common/new_delete.cpp b/src/Common/new_delete.cpp index 27db87809d3..8908d140b90 100644 --- a/src/Common/new_delete.cpp +++ b/src/Common/new_delete.cpp @@ -1,4 +1,5 @@ #include +#include #include #if defined(OS_DARWIN) && (USE_JEMALLOC) diff --git a/src/Common/tests/gtest_interval_tree.cpp b/src/Common/tests/gtest_interval_tree.cpp new file mode 100644 index 00000000000..e99bfe83a98 --- /dev/null +++ b/src/Common/tests/gtest_interval_tree.cpp @@ -0,0 +1,586 @@ +#include + +#include +#include + +#include +#include + + +using namespace DB; +using Int64Interval = Interval; + +template +std::set intervalSetToSet(const IntervalSet & interval_set) +{ + std::set result; + + for (const auto & interval : interval_set) + result.insert(interval); + + return result; +} + +template +std::map intervalMapToMap(const IntervalMap & interval_map) +{ + std::map result; + + for (const auto & [interval, value] : interval_map) + result.emplace(interval, value); + + return result; +} + +template +struct CollectIntervalsSetCallback +{ + explicit CollectIntervalsSetCallback(std::set & result_intervals_) + : result_intervals(result_intervals_) + { + } + + bool operator()(IntervalType interval) + { + result_intervals.insert(interval); + return true; + } + + std::set & result_intervals; +}; + +using CollectIntervalsSetInt64Callback = CollectIntervalsSetCallback; + +template +std::set intervalSetFindIntervals(const IntervalSet & interval_set, typename IntervalType::IntervalStorageType point) +{ + std::set result; + CollectIntervalsSetCallback callback(result); + + interval_set.find(point, callback); + + return result; +} + +template +struct CollectIntervalsMapCallback +{ + explicit CollectIntervalsMapCallback(std::map & result_intervals_) + : result_intervals(result_intervals_) + { + } + + bool operator()(IntervalType interval, const Value & value) + { + result_intervals.emplace(interval, value); + return true; + } + + std::map & result_intervals; +}; + + +template +std::map intervalMapFindIntervals(const IntervalMap & interval_set, typename IntervalType::IntervalStorageType point) +{ + std::map result; + CollectIntervalsMapCallback callback(result); + + interval_set.find(point, callback); + + return result; +} + +TEST(IntervalTree, IntervalSetBasic) +{ + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::set expected; + IntervalSet set; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(interval_index * 2, interval_index * 2 + 1); + expected.insert(interval); + set.insert(interval); + } + + 
ASSERT_TRUE(set.getIntervalsSize() == expected.size()); + ASSERT_TRUE(set.getIntervalsSize() == intervals_size); + ASSERT_TRUE(intervalSetToSet(set) == expected); + + for (const auto & expected_interval : expected) + { + std::set expected_intervals = {{expected_interval}}; + + auto actual_intervals = intervalSetFindIntervals(set, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + actual_intervals = intervalSetFindIntervals(set, expected_interval.right); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + ASSERT_TRUE(set.has(expected_interval.left)); + ASSERT_TRUE(set.has(expected_interval.right)); + } + + set.build(); + + ASSERT_TRUE(intervalSetToSet(set) == expected); + + for (const auto & expected_interval : expected) + { + auto actual_interval = intervalSetFindIntervals(set, expected_interval.left); + ASSERT_TRUE(actual_interval.size() == 1); + ASSERT_TRUE(actual_interval == std::set{expected_interval}); + + actual_interval = intervalSetFindIntervals(set, expected_interval.right); + ASSERT_TRUE(actual_interval.size() == 1); + ASSERT_TRUE(actual_interval == std::set{expected_interval}); + + ASSERT_TRUE(set.has(expected_interval.left)); + ASSERT_TRUE(set.has(expected_interval.right)); + } + } +} + +TEST(IntervalTree, IntervalSetPoints) +{ + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::set expected; + IntervalSet set; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(interval_index, interval_index); + expected.insert(interval); + set.insert(interval); + } + + ASSERT_TRUE(set.getIntervalsSize() == expected.size()); + ASSERT_TRUE(set.getIntervalsSize() == intervals_size); + ASSERT_TRUE(intervalSetToSet(set) == expected); + + for (const auto & expected_interval : expected) + { + std::set expected_intervals = {{expected_interval}}; + + auto actual_intervals = intervalSetFindIntervals(set, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + actual_intervals = intervalSetFindIntervals(set, expected_interval.right); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + ASSERT_TRUE(set.has(expected_interval.left)); + ASSERT_TRUE(set.has(expected_interval.right)); + } + + set.build(); + + ASSERT_TRUE(intervalSetToSet(set) == expected); + + for (const auto & expected_interval : expected) + { + auto actual_interval = intervalSetFindIntervals(set, expected_interval.left); + ASSERT_TRUE(actual_interval.size() == 1); + ASSERT_TRUE(actual_interval == std::set{expected_interval}); + + actual_interval = intervalSetFindIntervals(set, expected_interval.right); + ASSERT_TRUE(actual_interval.size() == 1); + ASSERT_TRUE(actual_interval == std::set{expected_interval}); + + ASSERT_TRUE(set.has(expected_interval.left)); + ASSERT_TRUE(set.has(expected_interval.right)); + } + } +} + +TEST(IntervalTree, IntervalSetIntersectingIntervals) +{ + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::set expected; + IntervalSet set; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(0, interval_index * 2 + 1); + expected.insert(interval); + set.insert(interval); + } + + ASSERT_TRUE(set.getIntervalsSize() == expected.size()); + ASSERT_TRUE(set.getIntervalsSize() 
== intervals_size); + ASSERT_TRUE(intervalSetToSet(set) == expected); + + for (const auto & expected_interval : expected) + { + auto actual_intervals = intervalSetFindIntervals(set, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == expected.size()); + ASSERT_TRUE(actual_intervals == expected); + + ASSERT_TRUE(set.has(expected_interval.left)); + ASSERT_TRUE(set.has(expected_interval.right)); + } + + set.build(); + + ASSERT_TRUE(intervalSetToSet(set) == expected); + + for (const auto & expected_interval : expected) + { + auto actual_intervals = intervalSetFindIntervals(set, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == expected.size()); + ASSERT_TRUE(actual_intervals == expected); + + ASSERT_TRUE(set.has(expected_interval.left)); + ASSERT_TRUE(set.has(expected_interval.right)); + } + } +} + +TEST(IntervalTree, IntervalSetIterators) +{ + { + IntervalSet set; + ASSERT_TRUE(set.begin() == set.end()); + ASSERT_TRUE(set.cbegin() == set.cend()); + set.build(); + ASSERT_TRUE(set.begin() == set.end()); + ASSERT_TRUE(set.cbegin() == set.cend()); + } + { + IntervalSet set; + set.emplace(Int64Interval(0, 5)); + ASSERT_TRUE(set.begin() != set.end()); + ASSERT_TRUE((*set.begin()).left == 0); + ASSERT_TRUE((*set.begin()).right == 5); + ASSERT_TRUE(set.begin()->left == 0); + ASSERT_TRUE(set.begin()->right == 5); + auto begin = set.begin(); + ++begin; + ASSERT_TRUE(begin == set.end()); + + begin = set.begin(); + begin++; + ASSERT_TRUE(begin == set.end()); + + auto end = set.end(); + --end; + ASSERT_TRUE(set.begin() == end); + + end = set.end(); + end--; + ASSERT_TRUE(set.begin() == end); + } + { + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::set expected; + IntervalSet set; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(interval_index * 2, interval_index * 2 + 1); + set.insert(interval); + expected.insert(interval); + } + + auto end = set.end(); + auto begin = set.begin(); + + std::set actual; + + while (end != begin) + { + --end; + actual.insert(*end); + } + + if (end != begin) + actual.insert(*end); + + ASSERT_TRUE(actual == expected); + } + } +} + +TEST(IntervalTree, IntervalSetInvalidInterval) +{ + IntervalSet interval_set; + ASSERT_TRUE(!interval_set.insert(Int64Interval(10, 0))); + ASSERT_TRUE(!interval_set.insert(Int64Interval(15, 10))); + ASSERT_TRUE(interval_set.insert(Int64Interval(20, 25))); + + std::set expected; + expected.insert({20, 25}); + + auto actual = intervalSetFindIntervals(interval_set, 20); + + ASSERT_TRUE(actual == expected); + ASSERT_TRUE(interval_set.has(20)); + + interval_set.build(); + + actual = intervalSetFindIntervals(interval_set, 20); + + ASSERT_TRUE(actual == expected); + ASSERT_TRUE(interval_set.has(20)); +} + +TEST(IntervalTree, IntervalMapBasic) +{ + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::map expected; + IntervalMap map; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(interval_index * 2, interval_index * 2 + 1); + auto value = std::to_string(interval.left); + expected.emplace(interval, value); + map.emplace(interval, value); + } + + ASSERT_TRUE(map.getIntervalsSize() == expected.size()); + ASSERT_TRUE(map.getIntervalsSize() == intervals_size); + ASSERT_TRUE(intervalMapToMap(map) == expected); + + for (const auto & [expected_interval, value] : expected) + { + std::map expected_intervals = 
{{expected_interval, std::to_string(expected_interval.left)}}; + + auto actual_intervals = intervalMapFindIntervals(map, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + actual_intervals = intervalMapFindIntervals(map, expected_interval.right); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + ASSERT_TRUE(map.has(expected_interval.left)); + ASSERT_TRUE(map.has(expected_interval.right)); + } + + map.build(); + + ASSERT_TRUE(intervalMapToMap(map) == expected); + + for (const auto & [expected_interval, value] : expected) + { + std::map expected_intervals = {{expected_interval, std::to_string(expected_interval.left)}}; + + auto actual_intervals = intervalMapFindIntervals(map, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + actual_intervals = intervalMapFindIntervals(map, expected_interval.right); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + ASSERT_TRUE(map.has(expected_interval.left)); + ASSERT_TRUE(map.has(expected_interval.right)); + } + } +} + +TEST(IntervalTree, IntervalMapPoints) +{ + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::map expected; + IntervalMap map; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(interval_index, interval_index); + auto value = std::to_string(interval.left); + expected.emplace(interval, value); + map.emplace(interval, value); + } + + ASSERT_TRUE(map.getIntervalsSize() == expected.size()); + ASSERT_TRUE(map.getIntervalsSize() == intervals_size); + ASSERT_TRUE(intervalMapToMap(map) == expected); + + for (const auto & [expected_interval, value] : expected) + { + std::map expected_intervals = {{expected_interval, std::to_string(expected_interval.left)}}; + + auto actual_intervals = intervalMapFindIntervals(map, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + actual_intervals = intervalMapFindIntervals(map, expected_interval.right); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + ASSERT_TRUE(map.has(expected_interval.left)); + ASSERT_TRUE(map.has(expected_interval.right)); + } + + map.build(); + + ASSERT_TRUE(intervalMapToMap(map) == expected); + + for (const auto & [expected_interval, value] : expected) + { + std::map expected_intervals = {{expected_interval, std::to_string(expected_interval.left)}}; + + auto actual_intervals = intervalMapFindIntervals(map, expected_interval.left); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + actual_intervals = intervalMapFindIntervals(map, expected_interval.right); + ASSERT_TRUE(actual_intervals.size() == 1); + ASSERT_TRUE(actual_intervals == expected_intervals); + + ASSERT_TRUE(map.has(expected_interval.left)); + ASSERT_TRUE(map.has(expected_interval.right)); + } + } +} + +TEST(IntervalTree, IntervalMapIntersectingIntervals) +{ + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::map expected; + IntervalMap map; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(0, interval_index * 2 + 1); + auto value = std::to_string(interval.left); + 
expected.emplace(interval, value); + map.emplace(interval, value); + } + + ASSERT_TRUE(map.getIntervalsSize() == expected.size()); + ASSERT_TRUE(map.getIntervalsSize() == intervals_size); + ASSERT_TRUE(intervalMapToMap(map) == expected); + + for (const auto & [expected_interval, value] : expected) + { + auto actual_intervals = intervalMapFindIntervals(map, expected_interval.left); + + ASSERT_TRUE(actual_intervals.size() == expected.size()); + ASSERT_TRUE(actual_intervals == expected); + + ASSERT_TRUE(map.has(expected_interval.left)); + ASSERT_TRUE(map.has(expected_interval.right)); + } + + map.build(); + + ASSERT_TRUE(intervalMapToMap(map) == expected); + + for (const auto & [expected_interval, value] : expected) + { + auto actual_intervals = intervalMapFindIntervals(map, expected_interval.left); + + ASSERT_TRUE(actual_intervals.size() == expected.size()); + ASSERT_TRUE(actual_intervals == expected); + + ASSERT_TRUE(map.has(expected_interval.left)); + ASSERT_TRUE(map.has(expected_interval.right)); + } + } +} + +TEST(IntervalTree, IntervalMapIterators) +{ + { + IntervalMap map; + ASSERT_TRUE(map.begin() == map.end()); + ASSERT_TRUE(map.cbegin() == map.cend()); + map.build(); + ASSERT_TRUE(map.begin() == map.end()); + ASSERT_TRUE(map.cbegin() == map.cend()); + } + { + IntervalMap map; + map.emplace(Int64Interval(0, 5), "value"); + ASSERT_TRUE(map.begin() != map.end()); + ASSERT_TRUE((*map.begin()).first.left == 0); + ASSERT_TRUE((*map.begin()).first.right == 5); + ASSERT_TRUE((*map.begin()).second == "value"); + ASSERT_TRUE(map.begin()->first.left == 0); + ASSERT_TRUE(map.begin()->first.right == 5); + ASSERT_TRUE(map.begin()->second == "value"); + auto begin = map.begin(); + ++begin; + ASSERT_TRUE(begin == map.end()); + + begin = map.begin(); + begin++; + ASSERT_TRUE(begin == map.end()); + + auto end = map.end(); + --end; + ASSERT_TRUE(map.begin() == end); + + end = map.end(); + end--; + ASSERT_TRUE(map.begin() == end); + } + { + for (size_t intervals_size = 0; intervals_size < 120; ++intervals_size) + { + std::map expected; + IntervalMap map; + + for (size_t interval_index = 0; interval_index < intervals_size; ++interval_index) + { + auto interval = Int64Interval(interval_index * 2, interval_index * 2 + 1); + auto value = std::to_string(interval.left); + map.emplace(interval, value); + expected.emplace(interval, value); + } + + auto end = map.end(); + auto begin = map.begin(); + + std::map actual; + + while (end != begin) + { + --end; + actual.insert(*end); + } + + if (end != begin) + actual.insert(*end); + + ASSERT_TRUE(actual == expected); + } + } +} + +TEST(IntervalTree, IntervalMapInvalidInterval) +{ + IntervalMap interval_map; + ASSERT_TRUE(!interval_map.insert(Int64Interval(10, 0), "Value")); + ASSERT_TRUE(!interval_map.insert(Int64Interval(15, 10), "Value")); + ASSERT_TRUE(interval_map.insert(Int64Interval(20, 25), "Value")); + + std::map expected; + expected.emplace(Int64Interval{20, 25}, "Value"); + + auto actual = intervalMapFindIntervals(interval_map, 20); + + ASSERT_TRUE(actual == expected); + ASSERT_TRUE(interval_map.has(20)); + + interval_map.build(); + + actual = intervalMapFindIntervals(interval_map, 20); + + ASSERT_TRUE(actual == expected); + ASSERT_TRUE(interval_map.has(20)); +} diff --git a/src/Compression/CMakeLists.txt b/src/Compression/CMakeLists.txt index 34369d8dbc8..efa3f2b1c09 100644 --- a/src/Compression/CMakeLists.txt +++ b/src/Compression/CMakeLists.txt @@ -6,7 +6,7 @@ if (ENABLE_FUZZING) list(REMOVE_ITEM ${fuzz_compression_sources} 
CompressionFactoryAdditions.cpp) add_library(fuzz_compression ${fuzz_compression_headers} ${fuzz_compression_sources}) - target_link_libraries(fuzz_compression PUBLIC clickhouse_parsers clickhouse_common_io common lz4) + target_link_libraries(fuzz_compression PUBLIC clickhouse_parsers clickhouse_common_io common ch_contrib::lz4) endif() if (ENABLE_EXAMPLES) diff --git a/src/Compression/CompressionCodecEncrypted.cpp b/src/Compression/CompressionCodecEncrypted.cpp index 6cb3874f808..ddf2fb26712 100644 --- a/src/Compression/CompressionCodecEncrypted.cpp +++ b/src/Compression/CompressionCodecEncrypted.cpp @@ -9,7 +9,7 @@ #include // This depends on BoringSSL-specific API, notably . -#if USE_SSL && USE_INTERNAL_SSL_LIBRARY +#if USE_SSL #include #include #include @@ -66,7 +66,7 @@ uint8_t getMethodCode(EncryptionMethod Method) } // end of namespace DB -#if USE_SSL && USE_INTERNAL_SSL_LIBRARY +#if USE_SSL namespace DB { @@ -513,7 +513,7 @@ void CompressionCodecEncrypted::doDecompressData(const char * source, UInt32 sou } -#else /* USE_SSL && USE_INTERNAL_SSL_LIBRARY */ +#else /* USE_SSL */ namespace DB { @@ -551,7 +551,7 @@ void CompressionCodecEncrypted::Configuration::load(const Poco::Util::AbstractCo } -#endif /* USE_SSL && USE_INTERNAL_SSL_LIBRARY */ +#endif /* USE_SSL */ namespace DB { diff --git a/src/Compression/fuzzers/CMakeLists.txt b/src/Compression/fuzzers/CMakeLists.txt index 189aea66a92..db1573f1354 100644 --- a/src/Compression/fuzzers/CMakeLists.txt +++ b/src/Compression/fuzzers/CMakeLists.txt @@ -8,7 +8,7 @@ add_executable (compressed_buffer_fuzzer compressed_buffer_fuzzer.cpp) target_link_libraries (compressed_buffer_fuzzer PRIVATE dbms ${LIB_FUZZING_ENGINE}) add_executable (lz4_decompress_fuzzer lz4_decompress_fuzzer.cpp) -target_link_libraries (lz4_decompress_fuzzer PUBLIC dbms lz4 ${LIB_FUZZING_ENGINE}) +target_link_libraries (lz4_decompress_fuzzer PUBLIC dbms ch_contrib::lz4 ${LIB_FUZZING_ENGINE}) add_executable (delta_decompress_fuzzer delta_decompress_fuzzer.cpp) target_link_libraries (delta_decompress_fuzzer PRIVATE dbms ${LIB_FUZZING_ENGINE}) diff --git a/src/Coordination/FourLetterCommand.cpp b/src/Coordination/FourLetterCommand.cpp index 3d0ebe86bf3..4c76d052f9b 100644 --- a/src/Coordination/FourLetterCommand.cpp +++ b/src/Coordination/FourLetterCommand.cpp @@ -228,6 +228,8 @@ String MonitorCommand::run() print(ret, "watch_count", state_machine.getTotalWatchesCount()); print(ret, "ephemerals_count", state_machine.getTotalEphemeralNodesCount()); print(ret, "approximate_data_size", state_machine.getApproximateDataSize()); + print(ret, "key_arena_size", state_machine.getKeyArenaSize()); + print(ret, "latest_snapshot_size", state_machine.getLatestSnapshotBufSize()); #if defined(__linux__) || defined(__APPLE__) print(ret, "open_file_descriptor_count", getCurrentProcessFDCount()); diff --git a/src/Coordination/KeeperSnapshotManager.cpp b/src/Coordination/KeeperSnapshotManager.cpp index 518d569ca67..8d5df7c35e9 100644 --- a/src/Coordination/KeeperSnapshotManager.cpp +++ b/src/Coordination/KeeperSnapshotManager.cpp @@ -9,6 +9,7 @@ #include #include #include +#include #include #include @@ -19,7 +20,6 @@ namespace ErrorCodes { extern const int UNKNOWN_FORMAT_VERSION; extern const int UNKNOWN_SNAPSHOT; - extern const int LOGICAL_ERROR; } namespace @@ -41,20 +41,6 @@ namespace return base; } - std::string getBaseName(const String & path) - { - size_t basename_start = path.rfind('/'); - return std::string{&path[basename_start + 1], path.length() - basename_start - 1}; - } - - String 
parentPath(const String & path) - { - auto rslash_pos = path.rfind('/'); - if (rslash_pos > 0) - return path.substr(0, rslash_pos); - return "/"; - } - void writeNode(const KeeperStorage::Node & node, SnapshotVersion version, WriteBuffer & out) { writeBinary(node.data, out); @@ -182,8 +168,11 @@ void KeeperStorageSnapshot::serialize(const KeeperStorageSnapshot & snapshot, Wr { const auto & path = it->key; const auto & node = it->value; + /// Benign race condition possible while taking snapshot: NuRaft decide to create snapshot at some log id + /// and only after some time we lock storage and enable snapshot mode. So snapshot_container_size can be + /// slightly bigger than required. if (static_cast(node.stat.mzxid) > snapshot.snapshot_meta->get_last_log_idx()) - throw Exception(ErrorCodes::LOGICAL_ERROR, "Trying to serialize node with mzxid {}, but last snapshot index {}", node.stat.mzxid, snapshot.snapshot_meta->get_last_log_idx()); + break; writeBinary(path, out); writeNode(node, snapshot.version, out); @@ -292,7 +281,7 @@ void KeeperStorageSnapshot::deserialize(SnapshotDeserializationResult & deserial if (itr.key != "/") { auto parent_path = parentPath(itr.key); - storage.container.updateValue(parent_path, [&path = itr.key] (KeeperStorage::Node & value) { value.children.insert(getBaseName(path)); }); + storage.container.updateValue(parent_path, [path = itr.key] (KeeperStorage::Node & value) { value.children.insert(getBaseName(path)); }); } } @@ -348,8 +337,8 @@ KeeperStorageSnapshot::KeeperStorageSnapshot(KeeperStorage * storage_, uint64_t , session_id(storage->session_id_counter) , cluster_config(cluster_config_) { - storage->enableSnapshotMode(); snapshot_container_size = storage->container.snapshotSize(); + storage->enableSnapshotMode(snapshot_container_size); begin = storage->getSnapshotIteratorBegin(); session_and_timeout = storage->getActiveSessions(); acl_map = storage->acl_map.getMapping(); @@ -362,8 +351,8 @@ KeeperStorageSnapshot::KeeperStorageSnapshot(KeeperStorage * storage_, const Sna , session_id(storage->session_id_counter) , cluster_config(cluster_config_) { - storage->enableSnapshotMode(); snapshot_container_size = storage->container.snapshotSize(); + storage->enableSnapshotMode(snapshot_container_size); begin = storage->getSnapshotIteratorBegin(); session_and_timeout = storage->getActiveSessions(); acl_map = storage->acl_map.getMapping(); diff --git a/src/Coordination/KeeperStateMachine.cpp b/src/Coordination/KeeperStateMachine.cpp index 1ac1a584451..20d3bcbfd30 100644 --- a/src/Coordination/KeeperStateMachine.cpp +++ b/src/Coordination/KeeperStateMachine.cpp @@ -155,7 +155,7 @@ bool KeeperStateMachine::apply_snapshot(nuraft::snapshot & s) { /// deserialize and apply snapshot to storage std::lock_guard lock(storage_and_responses_lock); - auto snapshot_deserialization_result = snapshot_manager.deserializeSnapshotFromBuffer(latest_snapshot_buf); + auto snapshot_deserialization_result = snapshot_manager.deserializeSnapshotFromBuffer(latest_snapshot_ptr); storage = std::move(snapshot_deserialization_result.storage); latest_snapshot_meta = snapshot_deserialization_result.snapshot_meta; cluster_config = snapshot_deserialization_result.cluster_config; @@ -212,14 +212,13 @@ void KeeperStateMachine::create_snapshot( } { - /// Must do it with lock (clearing elements from list) + /// Destroy snapshot with lock std::lock_guard lock(storage_and_responses_lock); + LOG_TRACE(log, "Clearing garbage after snapshot"); /// Turn off "snapshot mode" and clear outdate part of storage state 
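Taken together with the KeeperSnapshotManager changes above, the snapshot life cycle after this patch is roughly the following (a sketch of the call order, not verbatim code):

// 1. KeeperStorageSnapshot constructor:
//      snapshot_container_size = storage->container.snapshotSize();
//      storage->enableSnapshotMode(snapshot_container_size);  // copy-on-write only for pre-existing entries
// 2. serialize(): nodes whose mzxid is newer than the snapshot's last log index may appear
//      because of the benign race described above; they are now skipped instead of throwing.
// 3. create_snapshot() callback, under storage_and_responses_lock:
//      storage->clearGarbageAfterSnapshot();  // leave snapshot mode and drop outdated versions
//      snapshot.reset();                      // destroy the snapshot object under the same lock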
storage->clearGarbageAfterSnapshot(); - /// Destroy snapshot with lock - snapshot.reset(); LOG_TRACE(log, "Cleared garbage after snapshot"); - + snapshot.reset(); } } catch (...) @@ -404,6 +403,20 @@ uint64_t KeeperStateMachine::getApproximateDataSize() const return storage->getApproximateDataSize(); } +uint64_t KeeperStateMachine::getKeyArenaSize() const +{ + std::lock_guard lock(storage_and_responses_lock); + return storage->getArenaDataSize(); +} + +uint64_t KeeperStateMachine::getLatestSnapshotBufSize() const +{ + std::lock_guard lock(snapshots_lock); + if (latest_snapshot_buf) + return latest_snapshot_buf->size(); + return 0; +} + ClusterConfigPtr KeeperStateMachine::getClusterConfig() const { std::lock_guard lock(cluster_config_lock); diff --git a/src/Coordination/KeeperStateMachine.h b/src/Coordination/KeeperStateMachine.h index 2803f4b9027..291b58e2498 100644 --- a/src/Coordination/KeeperStateMachine.h +++ b/src/Coordination/KeeperStateMachine.h @@ -97,6 +97,8 @@ public: uint64_t getSessionWithEphemeralNodesCount() const; uint64_t getTotalEphemeralNodesCount() const; uint64_t getApproximateDataSize() const; + uint64_t getKeyArenaSize() const; + uint64_t getLatestSnapshotBufSize() const; private: @@ -120,7 +122,7 @@ private: SnapshotsQueue & snapshots_queue; /// Mutex for snapshots - std::mutex snapshots_lock; + mutable std::mutex snapshots_lock; /// Lock for storage and responses_queue. It's important to process requests /// and push them to the responses queue while holding this lock. Otherwise diff --git a/src/Coordination/KeeperStorage.cpp b/src/Coordination/KeeperStorage.cpp index 4f174e4e803..f6992815a6c 100644 --- a/src/Coordination/KeeperStorage.cpp +++ b/src/Coordination/KeeperStorage.cpp @@ -8,6 +8,7 @@ #include #include #include +#include #include #include #include @@ -23,20 +24,6 @@ namespace ErrorCodes extern const int BAD_ARGUMENTS; } -static String parentPath(const String & path) -{ - auto rslash_pos = path.rfind('/'); - if (rslash_pos > 0) - return path.substr(0, rslash_pos); - return "/"; -} - -static std::string getBaseName(const String & path) -{ - size_t basename_start = path.rfind('/'); - return std::string{&path[basename_start + 1], path.length() - basename_start - 1}; -} - static String base64Encode(const String & decoded) { std::ostringstream ostr; // STYLE_CHECK_ALLOW_STD_STRING_STREAM @@ -155,12 +142,12 @@ static KeeperStorage::ResponsesForSessions processWatchesImpl(const String & pat Strings paths_to_check_for_list_watches; if (event_type == Coordination::Event::CREATED) { - paths_to_check_for_list_watches.push_back(parent_path); /// Trigger list watches for parent + paths_to_check_for_list_watches.push_back(parent_path.toString()); /// Trigger list watches for parent } else if (event_type == Coordination::Event::DELETED) { paths_to_check_for_list_watches.push_back(path); /// Trigger both list watches for this path - paths_to_check_for_list_watches.push_back(parent_path); /// And for parent path + paths_to_check_for_list_watches.push_back(parent_path.toString()); /// And for parent path } /// CHANGED event never trigger list wathes @@ -244,7 +231,8 @@ struct KeeperStorageCreateRequestProcessor final : public KeeperStorageRequestPr bool checkAuth(KeeperStorage & storage, int64_t session_id) const override { auto & container = storage.container; - auto parent_path = parentPath(zk_request->getPath()); + auto path = zk_request->getPath(); + auto parent_path = parentPath(path); auto it = container.find(parent_path); if (it == container.end()) @@ -297,8 +285,7 
@@ struct KeeperStorageCreateRequestProcessor final : public KeeperStorageRequestPr response.error = Coordination::Error::ZNODEEXISTS; return { response_ptr, undo }; } - auto child_path = getBaseName(path_created); - if (child_path.empty()) + if (getBaseName(path_created).size == 0) { response.error = Coordination::Error::ZBADARGUMENTS; return { response_ptr, undo }; @@ -330,15 +317,18 @@ struct KeeperStorageCreateRequestProcessor final : public KeeperStorageRequestPr created_node.data = request.data; created_node.is_sequental = request.is_sequential; + auto [map_key, _] = container.insert(path_created, std::move(created_node)); + /// Take child path from key owned by map. + auto child_path = getBaseName(map_key->getKey()); + int32_t parent_cversion = request.parent_cversion; int64_t prev_parent_zxid; int32_t prev_parent_cversion; container.updateValue(parent_path, [child_path, zxid, &prev_parent_zxid, parent_cversion, &prev_parent_cversion] (KeeperStorage::Node & parent) { - parent.children.insert(child_path); - parent.size_bytes += child_path.size(); + parent.size_bytes += child_path.size; prev_parent_cversion = parent.stat.cversion; prev_parent_zxid = parent.stat.pzxid; @@ -356,14 +346,12 @@ struct KeeperStorageCreateRequestProcessor final : public KeeperStorageRequestPr }); response.path_created = path_created; - container.insert(path_created, std::move(created_node)); if (request.is_ephemeral) ephemerals[session_id].emplace(path_created); undo = [&storage, prev_parent_zxid, prev_parent_cversion, session_id, path_created, is_ephemeral = request.is_ephemeral, parent_path, child_path, acl_id] { - storage.container.erase(path_created); storage.acl_map.removeUsage(acl_id); if (is_ephemeral) @@ -376,8 +364,10 @@ struct KeeperStorageCreateRequestProcessor final : public KeeperStorageRequestPr undo_parent.stat.cversion = prev_parent_cversion; undo_parent.stat.pzxid = prev_parent_zxid; undo_parent.children.erase(child_path); - undo_parent.size_bytes -= child_path.size(); + undo_parent.size_bytes -= child_path.size; }); + + storage.container.erase(path_created); }; response.error = Coordination::Error::ZOK; @@ -504,33 +494,34 @@ struct KeeperStorageRemoveRequestProcessor final : public KeeperStorageRequestPr storage.acl_map.removeUsage(prev_node.acl_id); - auto child_basename = getBaseName(it->key); - container.updateValue(parentPath(request.path), [&child_basename] (KeeperStorage::Node & parent) + container.updateValue(parentPath(request.path), [child_basename = getBaseName(it->key)] (KeeperStorage::Node & parent) { --parent.stat.numChildren; ++parent.stat.cversion; parent.children.erase(child_basename); - parent.size_bytes -= child_basename.size(); + parent.size_bytes -= child_basename.size; }); response.error = Coordination::Error::ZOK; - + /// Erase full path from container after child removed from parent container.erase(request.path); - undo = [prev_node, &storage, path = request.path, child_basename] + undo = [prev_node, &storage, path = request.path] { if (prev_node.stat.ephemeralOwner != 0) storage.ephemerals[prev_node.stat.ephemeralOwner].emplace(path); storage.acl_map.addUsage(prev_node.acl_id); - storage.container.insert(path, prev_node); - storage.container.updateValue(parentPath(path), [&child_basename] (KeeperStorage::Node & parent) + /// Dangerous place: we are adding StringRef to child into children unordered_hash set. + /// That's why we are taking getBaseName from inserted key, not from the path from request object. 
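The comment above captures the central invariant of the StringRef migration: children now hold views, so every view must point into memory owned by the container's arena, never into a temporary String. Condensed to the pattern used throughout this patch (identifiers as they appear in the changed code):

// Insert first, so the map copies the key into its arena and owns it ...
auto [map_key, inserted] = container.insert(path_created, std::move(created_node));
// ... then derive the child name from the owned key, not from the transient request path,
// and only store that view in the parent's children set.
auto child_path = getBaseName(map_key->getKey());
container.updateValue(parent_path, [child_path](KeeperStorage::Node & parent)
{
    parent.children.insert(child_path);
    parent.size_bytes += child_path.size;
});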
+ auto [map_key, _] = storage.container.insert(path, prev_node); + storage.container.updateValue(parentPath(path), [child_name = getBaseName(map_key->getKey())] (KeeperStorage::Node & parent) { ++parent.stat.numChildren; --parent.stat.cversion; - parent.children.insert(child_basename); - parent.size_bytes += child_basename.size(); + parent.children.insert(child_name); + parent.size_bytes += child_name.size; }); }; } @@ -672,6 +663,7 @@ struct KeeperStorageListRequestProcessor final : public KeeperStorageRequestProc Coordination::ZooKeeperResponsePtr response_ptr = zk_request->makeResponse(); Coordination::ZooKeeperListResponse & response = dynamic_cast(*response_ptr); Coordination::ZooKeeperListRequest & request = dynamic_cast(*zk_request); + auto it = container.find(request.path); if (it == container.end()) { @@ -683,7 +675,10 @@ struct KeeperStorageListRequestProcessor final : public KeeperStorageRequestProc if (path_prefix.empty()) throw DB::Exception("Logical error: path cannot be empty", ErrorCodes::LOGICAL_ERROR); - response.names.insert(response.names.end(), it->value.children.begin(), it->value.children.end()); + response.names.reserve(it->value.children.size()); + + for (const auto child : it->value.children) + response.names.push_back(child.toString()); response.stat = it->value.stat; response.error = Coordination::Error::ZOK; @@ -1092,15 +1087,17 @@ KeeperStorage::ResponsesForSessions KeeperStorage::processRequest(const Coordina { for (const auto & ephemeral_path : it->second) { - container.erase(ephemeral_path); container.updateValue(parentPath(ephemeral_path), [&ephemeral_path] (KeeperStorage::Node & parent) { --parent.stat.numChildren; ++parent.stat.cversion; - parent.children.erase(getBaseName(ephemeral_path)); - parent.size_bytes -= getBaseName(ephemeral_path).size(); + auto base_name = getBaseName(ephemeral_path); + parent.children.erase(base_name); + parent.size_bytes -= base_name.size; }); + container.erase(ephemeral_path); + auto responses = processWatchesImpl(ephemeral_path, watches, list_watches, Coordination::Event::DELETED); results.insert(results.end(), responses.begin(), responses.end()); } diff --git a/src/Coordination/KeeperStorage.h b/src/Coordination/KeeperStorage.h index 11d191b7f50..cbf33be61a0 100644 --- a/src/Coordination/KeeperStorage.h +++ b/src/Coordination/KeeperStorage.h @@ -8,16 +8,17 @@ #include #include #include -#include #include +#include + namespace DB { struct KeeperStorageRequestProcessor; using KeeperStorageRequestProcessorPtr = std::shared_ptr; using ResponseCallback = std::function; -using ChildrenSet = std::unordered_set; +using ChildrenSet = absl::flat_hash_set; using SessionAndTimeout = std::unordered_map; struct KeeperStorageSnapshot; @@ -28,6 +29,7 @@ struct KeeperStorageSnapshot; class KeeperStorage { public: + struct Node { String data; @@ -158,9 +160,9 @@ public: /// Set of methods for creating snapshots /// Turn on snapshot mode, so data inside Container is not deleted, but replaced with new version. - void enableSnapshotMode() + void enableSnapshotMode(size_t up_to_size) { - container.enableSnapshotMode(); + container.enableSnapshotMode(up_to_size); } /// Turn off snapshot mode. 
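The accessor added in the next hunk is what feeds the new key_arena_size field of the 'mntr' four-letter command earlier in this patch; the call chain is simply:

// MonitorCommand::run()                  -> state_machine.getKeyArenaSize()
// KeeperStateMachine::getKeyArenaSize()  -> storage->getArenaDataSize()   (under storage_and_responses_lock)
// KeeperStorage::getArenaDataSize()      -> container.keyArenaSize()      (size of the ArenaWithFreeLists holding keys)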
@@ -203,6 +205,12 @@ public: return container.getApproximateDataSize(); } + uint64_t getArenaDataSize() const + { + return container.keyArenaSize(); + } + + uint64_t getTotalWatchesCount() const; uint64_t getWatchedPathsCount() const diff --git a/src/Coordination/SnapshotableHashTable.h b/src/Coordination/SnapshotableHashTable.h index 7704825f830..b1d72578530 100644 --- a/src/Coordination/SnapshotableHashTable.h +++ b/src/Coordination/SnapshotableHashTable.h @@ -1,8 +1,11 @@ #pragma once #include +#include +#include #include #include #include +#include namespace DB { @@ -10,11 +13,12 @@ namespace DB template struct ListNode { - std::string key; + StringRef key; V value; - bool active_in_map; -}; + bool active_in_map{true}; + bool free_key{false}; +}; template class SnapshotableHashTable @@ -23,11 +27,15 @@ private: using ListElem = ListNode; using List = std::list; - using IndexMap = std::unordered_map; + using Mapped = typename List::iterator; + using IndexMap = HashMap; List list; IndexMap map; bool snapshot_mode{false}; + /// Allows to avoid additional copies in updateValue function + size_t snapshot_up_to_size = 0; + ArenaWithFreeLists arena; uint64_t approximate_data_size{0}; @@ -105,51 +113,68 @@ private: } } + StringRef copyStringInArena(const std::string & value_to_copy) + { + size_t value_to_copy_size = value_to_copy.size(); + char * place_for_key = arena.alloc(value_to_copy_size); + memcpy(reinterpret_cast(place_for_key), reinterpret_cast(value_to_copy.data()), value_to_copy_size); + StringRef updated_value{place_for_key, value_to_copy_size}; + + return updated_value; + } + + public: using iterator = typename List::iterator; using const_iterator = typename List::const_iterator; - using reverse_iterator = typename List::reverse_iterator; - using const_reverse_iterator = typename List::const_reverse_iterator; using ValueUpdater = std::function; - bool insert(const std::string & key, const V & value) + std::pair insert(const std::string & key, const V & value) { - auto it = map.find(key); - if (it == map.end()) + size_t hash_value = map.hash(key); + auto it = map.find(key, hash_value); + + if (!it) { - ListElem elem{key, value, true}; + ListElem elem{copyStringInArena(key), value, true}; auto itr = list.insert(list.end(), elem); - map.emplace(itr->key, itr); + bool inserted; + map.emplace(itr->key, it, inserted, hash_value); + assert(inserted); + + it->getMapped() = itr; updateDataSize(INSERT, key.size(), value.sizeInBytes(), 0); - return true; + return std::make_pair(it, true); } - return false; + return std::make_pair(it, false); } - void insertOrReplace(const std::string & key, const V & value) { - auto it = map.find(key); - uint64_t old_value_size = it == map.end() ? 0 : it->second->value.sizeInBytes(); + size_t hash_value = map.hash(key); + auto it = map.find(key, hash_value); + uint64_t old_value_size = it == map.end() ? 
0 : it->getMapped()->value.sizeInBytes(); if (it == map.end()) { - ListElem elem{key, value, true}; + ListElem elem{copyStringInArena(key), value, true}; auto itr = list.insert(list.end(), elem); - map.emplace(itr->key, itr); + bool inserted; + map.emplace(itr->key, it, inserted, hash_value); + assert(inserted); + it->getMapped() = itr; } else { - auto list_itr = it->second; + auto list_itr = it->getMapped(); if (snapshot_mode) { - ListElem elem{key, value, true}; + ListElem elem{list_itr->key, value, true}; list_itr->active_in_map = false; auto new_list_itr = list.insert(list.end(), elem); - map.erase(it); - map.emplace(new_list_itr->key, new_list_itr); + it->getMapped() = new_list_itr; } else { @@ -165,16 +190,18 @@ public: if (it == map.end()) return false; - auto list_itr = it->second; + auto list_itr = it->getMapped(); uint64_t old_data_size = list_itr->value.sizeInBytes(); if (snapshot_mode) { list_itr->active_in_map = false; - map.erase(it); + list_itr->free_key = true; + map.erase(it->getKey()); } else { - map.erase(it); + map.erase(it->getKey()); + arena.free(const_cast(list_itr->key.data), list_itr->key.size); list.erase(list_itr); } @@ -187,48 +214,62 @@ public: return map.find(key) != map.end(); } - const_iterator updateValue(const std::string & key, ValueUpdater updater) + const_iterator updateValue(StringRef key, ValueUpdater updater) { - auto it = map.find(key); + size_t hash_value = map.hash(key); + auto it = map.find(key, hash_value); assert(it != map.end()); - auto list_itr = it->second; + auto list_itr = it->getMapped(); uint64_t old_value_size = list_itr->value.sizeInBytes(); const_iterator ret; if (snapshot_mode) { - auto elem_copy = *(list_itr); - list_itr->active_in_map = false; - map.erase(it); - updater(elem_copy.value); - auto itr = list.insert(list.end(), elem_copy); - map.emplace(itr->key, itr); - ret = itr; + /// We in snapshot mode but updating some node which is already more + /// fresh than snapshot distance. So it will not participate in + /// snapshot and we don't need to copy it. 
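The optimisation described above, together with the arena-owned keys, gives the table the following observable behaviour (an illustrative sequence using the IntNode test type from gtest_coordination.cpp further below; internal sizes omitted):

DB::SnapshotableHashTable<IntNode> map;
map.insert("/a", 1);
map.enableSnapshotMode(map.size());                    // only entries existing now participate in the snapshot
map.insert("/b", 2);                                   // appended after the cutoff
map.updateValue("/a", [](IntNode & v) { v = 10; });    // old version kept (active_in_map = false), updated copy appended
map.updateValue("/b", [](IntNode & v) { v = 20; });    // beyond snapshot_up_to_size: updated in place, no copy
map.disableSnapshotMode();
map.clearOutdatedNodes();                              // frees the stale "/a" version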
+ size_t distance = std::distance(list.begin(), list_itr); + if (distance < snapshot_up_to_size) + { + auto elem_copy = *(list_itr); + list_itr->active_in_map = false; + updater(elem_copy.value); + auto itr = list.insert(list.end(), elem_copy); + it->getMapped() = itr; + ret = itr; + } + else + { + updater(list_itr->value); + ret = list_itr; + } } else { updater(list_itr->value); ret = list_itr; } - updateDataSize(UPDATE_VALUE, key.size(), ret->value.sizeInBytes(), old_value_size); + + updateDataSize(UPDATE_VALUE, key.size, ret->value.sizeInBytes(), old_value_size); return ret; } - const_iterator find(const std::string & key) const + const_iterator find(StringRef key) const { auto map_it = map.find(key); if (map_it != map.end()) - return map_it->second; + return map_it->getMapped(); return list.end(); } - const V & getValue(const std::string & key) const + + const V & getValue(StringRef key) const { auto it = map.find(key); - assert(it != map.end()); - return it->second->value; + assert(it); + return it->getMapped()->value; } void clearOutdatedNodes() @@ -239,29 +280,39 @@ public: { if (!itr->active_in_map) { - updateDataSize(CLEAR_OUTDATED_NODES, itr->key.size(), itr->value.sizeInBytes(), 0); + updateDataSize(CLEAR_OUTDATED_NODES, itr->key.size, itr->value.sizeInBytes(), 0); + if (itr->free_key) + arena.free(const_cast(itr->key.data), itr->key.size); itr = list.erase(itr); } else + { + assert(!itr->free_key); itr++; + } } } void clear() { - list.clear(); map.clear(); + for (auto itr = list.begin(); itr != list.end(); ++itr) + arena.free(const_cast(itr->key.data), itr->key.size); + list.clear(); updateDataSize(CLEAR, 0, 0, 0); } - void enableSnapshotMode() + void enableSnapshotMode(size_t up_to_size) { snapshot_mode = true; + snapshot_up_to_size = up_to_size; } void disableSnapshotMode() { + snapshot_mode = false; + snapshot_up_to_size = 0; } size_t size() const @@ -279,15 +330,15 @@ public: return approximate_data_size; } + uint64_t keyArenaSize() const + { + return arena.size(); + } + iterator begin() { return list.begin(); } const_iterator begin() const { return list.cbegin(); } iterator end() { return list.end(); } const_iterator end() const { return list.cend(); } - - reverse_iterator rbegin() { return list.rbegin(); } - const_reverse_iterator rbegin() const { return list.crbegin(); } - reverse_iterator rend() { return list.rend(); } - const_reverse_iterator rend() const { return list.crend(); } }; diff --git a/src/Coordination/ZooKeeperDataReader.cpp b/src/Coordination/ZooKeeperDataReader.cpp index bd13a70252e..5d7b78d6a28 100644 --- a/src/Coordination/ZooKeeperDataReader.cpp +++ b/src/Coordination/ZooKeeperDataReader.cpp @@ -1,10 +1,13 @@ #include + #include #include +#include + #include #include #include -#include +#include namespace DB @@ -16,20 +19,6 @@ namespace ErrorCodes extern const int CORRUPTED_DATA; } -static String parentPath(const String & path) -{ - auto rslash_pos = path.rfind('/'); - if (rslash_pos > 0) - return path.substr(0, rslash_pos); - return "/"; -} - -static std::string getBaseName(const String & path) -{ - size_t basename_start = path.rfind('/'); - return std::string{&path[basename_start + 1], path.length() - basename_start - 1}; -} - int64_t getZxidFromName(const std::string & filename) { std::filesystem::path path(filename); @@ -148,7 +137,7 @@ int64_t deserializeStorageData(KeeperStorage & storage, ReadBuffer & in, Poco::L if (itr.key != "/") { auto parent_path = parentPath(itr.key); - storage.container.updateValue(parent_path, [&path = itr.key] 
(KeeperStorage::Node & value) { value.children.insert(getBaseName(path)); value.stat.numChildren++; }); + storage.container.updateValue(parent_path, [path = itr.key] (KeeperStorage::Node & value) { value.children.insert(getBaseName(path)); value.stat.numChildren++; }); } } diff --git a/src/Coordination/pathUtils.cpp b/src/Coordination/pathUtils.cpp new file mode 100644 index 00000000000..1e1da339d2e --- /dev/null +++ b/src/Coordination/pathUtils.cpp @@ -0,0 +1,38 @@ +#include +#include + +namespace DB +{ + +static size_t findLastSlash(StringRef path) +{ + if (path.size == 0) + return std::string::npos; + + for (size_t i = path.size - 1; i > 0; --i) + { + if (path.data[i] == '/') + return i; + } + + if (path.data[0] == '/') + return 0; + + return std::string::npos; +} + +StringRef parentPath(StringRef path) +{ + auto rslash_pos = findLastSlash(path); + if (rslash_pos > 0) + return StringRef{path.data, rslash_pos}; + return "/"; +} + +StringRef getBaseName(StringRef path) +{ + size_t basename_start = findLastSlash(path); + return StringRef{path.data + basename_start + 1, path.size - basename_start - 1}; +} + +} diff --git a/src/Coordination/pathUtils.h b/src/Coordination/pathUtils.h new file mode 100644 index 00000000000..69ed2d8b177 --- /dev/null +++ b/src/Coordination/pathUtils.h @@ -0,0 +1,13 @@ +#pragma once + +#include +#include + +namespace DB +{ + +StringRef parentPath(StringRef path); + +StringRef getBaseName(StringRef path); + +} diff --git a/src/Coordination/tests/gtest_coordination.cpp b/src/Coordination/tests/gtest_coordination.cpp index d274ee34a88..9c434ebb653 100644 --- a/src/Coordination/tests/gtest_coordination.cpp +++ b/src/Coordination/tests/gtest_coordination.cpp @@ -844,10 +844,10 @@ struct IntNode TEST_P(CoordinationTest, SnapshotableHashMapSimple) { DB::SnapshotableHashTable hello; - EXPECT_TRUE(hello.insert("hello", 5)); + EXPECT_TRUE(hello.insert("hello", 5).second); EXPECT_TRUE(hello.contains("hello")); EXPECT_EQ(hello.getValue("hello"), 5); - EXPECT_FALSE(hello.insert("hello", 145)); + EXPECT_FALSE(hello.insert("hello", 145).second); EXPECT_EQ(hello.getValue("hello"), 5); hello.updateValue("hello", [](IntNode & value) { value = 7; }); EXPECT_EQ(hello.getValue("hello"), 7); @@ -859,10 +859,10 @@ TEST_P(CoordinationTest, SnapshotableHashMapSimple) TEST_P(CoordinationTest, SnapshotableHashMapTrySnapshot) { DB::SnapshotableHashTable map_snp; - EXPECT_TRUE(map_snp.insert("/hello", 7)); - EXPECT_FALSE(map_snp.insert("/hello", 145)); - map_snp.enableSnapshotMode(); - EXPECT_FALSE(map_snp.insert("/hello", 145)); + EXPECT_TRUE(map_snp.insert("/hello", 7).second); + EXPECT_FALSE(map_snp.insert("/hello", 145).second); + map_snp.enableSnapshotMode(100000); + EXPECT_FALSE(map_snp.insert("/hello", 145).second); map_snp.updateValue("/hello", [](IntNode & value) { value = 554; }); EXPECT_EQ(map_snp.getValue("/hello"), 554); EXPECT_EQ(map_snp.snapshotSize(), 2); @@ -880,7 +880,7 @@ TEST_P(CoordinationTest, SnapshotableHashMapTrySnapshot) EXPECT_EQ(itr, map_snp.end()); for (size_t i = 0; i < 5; ++i) { - EXPECT_TRUE(map_snp.insert("/hello" + std::to_string(i), i)); + EXPECT_TRUE(map_snp.insert("/hello" + std::to_string(i), i).second); } EXPECT_EQ(map_snp.getValue("/hello3"), 3); @@ -951,7 +951,7 @@ TEST_P(CoordinationTest, SnapshotableHashMapDataSize) hello.clear(); EXPECT_EQ(hello.getApproximateDataSize(), 0); - hello.enableSnapshotMode(); + hello.enableSnapshotMode(10000); hello.insert("hello", 1); EXPECT_EQ(hello.getApproximateDataSize(), 9); hello.updateValue("hello", [](IntNode 
& value) { value = 2; }); @@ -984,7 +984,7 @@ TEST_P(CoordinationTest, SnapshotableHashMapDataSize) world.erase("world"); EXPECT_EQ(world.getApproximateDataSize(), 0); - world.enableSnapshotMode(); + world.enableSnapshotMode(100000); world.insert("world", n1); EXPECT_EQ(world.getApproximateDataSize(), 98); world.updateValue("world", [&](Node & value) { value = n2; }); diff --git a/src/Core/Field.cpp b/src/Core/Field.cpp index a85b7cff46e..70a1458c9f0 100644 --- a/src/Core/Field.cpp +++ b/src/Core/Field.cpp @@ -106,6 +106,12 @@ inline Field getBinaryValue(UInt8 type, ReadBuffer & buf) readStringBinary(value.data, buf); return value; } + case Field::Types::Bool: + { + UInt8 value; + readBinary(value, buf); + return bool(value); + } } return Field(); } @@ -346,6 +352,13 @@ Field Field::restoreFromDump(const std::string_view & dump_) return str; } + prefix = std::string_view{"Bool_"}; + if (dump.starts_with(prefix)) + { + bool value = parseFromString(dump.substr(prefix.length())); + return value; + } + prefix = std::string_view{"Array_["}; if (dump.starts_with(prefix)) { diff --git a/src/Core/Field.h b/src/Core/Field.h index 19573ed9831..b525e3a83ab 100644 --- a/src/Core/Field.h +++ b/src/Core/Field.h @@ -282,6 +282,7 @@ public: Int256 = 25, Map = 26, UUID = 27, + Bool = 28, }; }; @@ -323,7 +324,10 @@ public: template Field(T && rhs, enable_if_not_field_or_bool_or_stringlike_t = nullptr); - Field(bool rhs) : Field(castToNearestFieldType(rhs)) {} + Field(bool rhs) : Field(castToNearestFieldType(rhs)) + { + which = Types::Bool; + } /// Create a string inplace. Field(const std::string_view & str) { create(str.data(), str.size()); } @@ -376,7 +380,12 @@ public: enable_if_not_field_or_bool_or_stringlike_t & operator=(T && rhs); - Field & operator= (bool rhs) { return *this = castToNearestFieldType(rhs); } + Field & operator= (bool rhs) + { + *this = castToNearestFieldType(rhs); + which = Types::Bool; + return *this; + } Field & operator= (const std::string_view & str); Field & operator= (const String & str) { return *this = std::string_view{str}; } @@ -450,6 +459,7 @@ public: switch (which) { case Types::Null: return false; + case Types::Bool: [[fallthrough]]; case Types::UInt64: return get() < rhs.get(); case Types::UInt128: return get() < rhs.get(); case Types::UInt256: return get() < rhs.get(); @@ -487,6 +497,7 @@ public: switch (which) { case Types::Null: return true; + case Types::Bool: [[fallthrough]]; case Types::UInt64: return get() <= rhs.get(); case Types::UInt128: return get() <= rhs.get(); case Types::UInt256: return get() <= rhs.get(); @@ -524,6 +535,7 @@ public: switch (which) { case Types::Null: return true; + case Types::Bool: [[fallthrough]]; case Types::UInt64: return get() == rhs.get(); case Types::Int64: return get() == rhs.get(); case Types::Float64: @@ -580,6 +592,11 @@ public: case Types::Array: return f(field.template get()); case Types::Tuple: return f(field.template get()); case Types::Map: return f(field.template get()); + case Types::Bool: + { + bool value = bool(field.template get()); + return f(value); + } case Types::Decimal32: return f(field.template get>()); case Types::Decimal64: return f(field.template get>()); case Types::Decimal128: return f(field.template get>()); @@ -739,6 +756,7 @@ template <> struct Field::TypeToEnum>{ static const Typ template <> struct Field::TypeToEnum>{ static const Types::Which value = Types::Decimal256; }; template <> struct Field::TypeToEnum>{ static const Types::Which value = Types::Decimal64; }; template <> struct Field::TypeToEnum{ 
static const Types::Which value = Types::AggregateFunctionState; }; +template <> struct Field::TypeToEnum{ static const Types::Which value = Types::Bool; }; template <> struct Field::EnumToType { using Type = Null; }; template <> struct Field::EnumToType { using Type = UInt64; }; @@ -758,6 +776,7 @@ template <> struct Field::EnumToType { using Type = Dec template <> struct Field::EnumToType { using Type = DecimalField; }; template <> struct Field::EnumToType { using Type = DecimalField; }; template <> struct Field::EnumToType { using Type = DecimalField; }; +template <> struct Field::EnumToType { using Type = UInt64; }; inline constexpr bool isInt64OrUInt64FieldType(Field::Types::Which t) { @@ -765,6 +784,13 @@ inline constexpr bool isInt64OrUInt64FieldType(Field::Types::Which t) || t == Field::Types::UInt64; } +inline constexpr bool isInt64OrUInt64orBoolFieldType(Field::Types::Which t) +{ + return t == Field::Types::Int64 + || t == Field::Types::UInt64 + || t == Field::Types::Bool; +} + // Field value getter with type checking in debug builds. template NearestFieldType> & Field::get() @@ -781,7 +807,7 @@ NearestFieldType> & Field::get() // Disregard signedness when converting between int64 types. constexpr Field::Types::Which target = TypeToEnum::value; if (target != which - && (!isInt64OrUInt64FieldType(target) || !isInt64OrUInt64FieldType(which))) + && (!isInt64OrUInt64orBoolFieldType(target) || !isInt64OrUInt64orBoolFieldType(which))) throw Exception(ErrorCodes::LOGICAL_ERROR, "Invalid Field get from type {} to type {}", which, target); #endif diff --git a/src/Core/Settings.h b/src/Core/Settings.h index d55be808aa8..0aae455d058 100644 --- a/src/Core/Settings.h +++ b/src/Core/Settings.h @@ -75,7 +75,11 @@ class IColumn; M(UInt64, s3_max_single_read_retries, 4, "The maximum number of retries during single S3 read.", 0) \ M(UInt64, s3_max_redirects, 10, "Max number of S3 redirects hops allowed.", 0) \ M(UInt64, s3_max_connections, 1024, "The maximum number of connections per server.", 0) \ + M(Bool, s3_truncate_on_insert, false, "Enables or disables truncate before insert in s3 engine tables.", 0) \ + M(Bool, s3_create_new_file_on_insert, false, "Enables or disables creating a new file on each insert in s3 engine tables.", 0) \ M(UInt64, hdfs_replication, 0, "The actual number of replications can be specified when the hdfs file is created.", 0) \ + M(Bool, hdfs_truncate_on_insert, false, "Enables or disables truncate before insert in hdfs engine tables.", 0) \ + M(Bool, hdfs_create_new_file_on_insert, false, "Enables or disables creating a new file on each insert in hdfs engine tables.", 0) \ M(UInt64, hsts_max_age, 0, "Expired time for hsts. 0 means disable HSTS.", 0) \ M(Bool, extremes, false, "Calculate minimums and maximums of the result columns. 
They can be output in JSON-formats.", IMPORTANT) \ M(Bool, use_uncompressed_cache, false, "Whether to use the cache of uncompressed blocks.", 0) \ @@ -490,6 +494,7 @@ class IColumn; \ M(Bool, engine_file_empty_if_not_exists, false, "Allows to select data from a file engine table without file", 0) \ M(Bool, engine_file_truncate_on_insert, false, "Enables or disables truncate before insert in file engine tables", 0) \ + M(Bool, engine_file_allow_create_multiple_files, false, "Enables or disables creating a new file on each insert in file engine tables if format has suffix.", 0) \ M(Bool, allow_experimental_database_replicated, false, "Allow to create databases with Replicated engine", 0) \ M(UInt64, database_replicated_initial_query_timeout_sec, 300, "How long initial DDL query should wait for Replicated database to precess previous DDL queue entries", 0) \ M(UInt64, max_distributed_depth, 5, "Maximum distributed query depth", 0) \ diff --git a/src/Core/TypeId.h b/src/Core/TypeId.h index 3218db10778..39058773184 100644 --- a/src/Core/TypeId.h +++ b/src/Core/TypeId.h @@ -10,12 +10,12 @@ namespace DB * Returns TypeIndex::Nothing if type was not present in TypeIndex; * Returns TypeIndex element otherwise. * - * @example TypeId == TypeIndex::UInt8 - * @example TypeId == TypeIndex::Nothing + * @example TypeToTypeIndex == TypeIndex::UInt8 + * @example TypeToTypeIndex == TypeIndex::Nothing */ -template inline constexpr TypeIndex TypeId = TypeIndex::Nothing; +template inline constexpr TypeIndex TypeToTypeIndex = TypeIndex::Nothing; -template struct ReverseTypeIdT : std::false_type {}; +template struct TypeIndexToTypeHelper : std::false_type {}; /** * Obtain real type from TypeIndex if possible. @@ -23,14 +23,14 @@ template struct ReverseTypeIdT : std::false_type {}; * Returns a type alias if is corresponds to TypeIndex value. * Yields a compiler error otherwise. 
* - * @example ReverseTypeId == UInt8 + * @example TypeIndexToType == UInt8 */ -template using ReverseTypeId = typename ReverseTypeIdT::T; -template constexpr bool HasReverseTypeId = ReverseTypeIdT::value; +template using TypeIndexToType = typename TypeIndexToTypeHelper::T; +template constexpr bool TypeIndexHasType = TypeIndexToTypeHelper::value; #define TYPEID_MAP(_A) \ - template <> inline constexpr TypeIndex TypeId<_A> = TypeIndex::_A; \ - template <> struct ReverseTypeIdT : std::true_type { using T = _A; }; + template <> inline constexpr TypeIndex TypeToTypeIndex<_A> = TypeIndex::_A; \ + template <> struct TypeIndexToTypeHelper : std::true_type { using T = _A; }; TYPEID_MAP(UInt8) TYPEID_MAP(UInt16) @@ -58,4 +58,7 @@ TYPEID_MAP(String) struct Array; TYPEID_MAP(Array) + +#undef TYPEID_MAP + } diff --git a/src/Core/config_core.h.in b/src/Core/config_core.h.in index 11dd9bf96f1..5d37f8cf361 100644 --- a/src/Core/config_core.h.in +++ b/src/Core/config_core.h.in @@ -7,9 +7,7 @@ #cmakedefine01 USE_RDKAFKA #cmakedefine01 USE_AMQPCPP #cmakedefine01 USE_EMBEDDED_COMPILER -#cmakedefine01 USE_INTERNAL_LLVM_LIBRARY #cmakedefine01 USE_SSL -#cmakedefine01 USE_OPENCL #cmakedefine01 USE_LDAP #cmakedefine01 USE_ROCKSDB #cmakedefine01 USE_LIBPQXX @@ -18,3 +16,6 @@ #cmakedefine01 USE_NLP #cmakedefine01 USE_KRB5 #cmakedefine01 USE_FILELOG +#cmakedefine01 USE_ODBC +#cmakedefine01 USE_REPLXX +#cmakedefine01 USE_JEMALLOC diff --git a/src/Core/examples/CMakeLists.txt b/src/Core/examples/CMakeLists.txt index c8846eb1743..cd74ce68136 100644 --- a/src/Core/examples/CMakeLists.txt +++ b/src/Core/examples/CMakeLists.txt @@ -1,6 +1,5 @@ add_executable (string_pool string_pool.cpp) -target_link_libraries (string_pool PRIVATE clickhouse_common_io) -target_include_directories (string_pool SYSTEM BEFORE PRIVATE ${SPARSEHASH_INCLUDE_DIR}) +target_link_libraries (string_pool PRIVATE clickhouse_common_io ch_contrib::sparsehash) add_executable (field field.cpp) target_link_libraries (field PRIVATE dbms) @@ -10,9 +9,6 @@ target_link_libraries (string_ref_hash PRIVATE clickhouse_common_io) add_executable (mysql_protocol mysql_protocol.cpp) target_link_libraries (mysql_protocol PRIVATE dbms) -if(USE_SSL) - target_include_directories (mysql_protocol SYSTEM PRIVATE ${OPENSSL_INCLUDE_DIR}) -endif() add_executable (coro coro.cpp) target_link_libraries (coro PRIVATE clickhouse_common_io) diff --git a/src/Core/tests/gtest_field.cpp b/src/Core/tests/gtest_field.cpp index 5230f13bf8a..5585442d835 100644 --- a/src/Core/tests/gtest_field.cpp +++ b/src/Core/tests/gtest_field.cpp @@ -7,14 +7,14 @@ GTEST_TEST(Field, FromBool) { { Field f{false}; - ASSERT_EQ(f.getType(), Field::Types::UInt64); + ASSERT_EQ(f.getType(), Field::Types::Bool); ASSERT_EQ(f.get(), 0); ASSERT_EQ(f.get(), false); } { Field f{true}; - ASSERT_EQ(f.getType(), Field::Types::UInt64); + ASSERT_EQ(f.getType(), Field::Types::Bool); ASSERT_EQ(f.get(), 1); ASSERT_EQ(f.get(), true); } @@ -22,7 +22,7 @@ GTEST_TEST(Field, FromBool) { Field f; f = false; - ASSERT_EQ(f.getType(), Field::Types::UInt64); + ASSERT_EQ(f.getType(), Field::Types::Bool); ASSERT_EQ(f.get(), 0); ASSERT_EQ(f.get(), false); } @@ -30,7 +30,7 @@ GTEST_TEST(Field, FromBool) { Field f; f = true; - ASSERT_EQ(f.getType(), Field::Types::UInt64); + ASSERT_EQ(f.getType(), Field::Types::Bool); ASSERT_EQ(f.get(), 1); ASSERT_EQ(f.get(), true); } diff --git a/src/DataTypes/DataTypeDecimalBase.h b/src/DataTypes/DataTypeDecimalBase.h index dc8c99b06bc..bdb39978825 100644 --- a/src/DataTypes/DataTypeDecimalBase.h +++ 
b/src/DataTypes/DataTypeDecimalBase.h @@ -1,14 +1,15 @@ #pragma once -#include -#include -#include -#include -#include - #include #include +#include +#include +#include +#include +#include +#include + namespace DB { @@ -59,7 +60,7 @@ class DataTypeDecimalBase : public IDataType public: using FieldType = T; using ColumnType = ColumnDecimal; - static constexpr auto type_id = TypeId; + static constexpr auto type_id = TypeToTypeIndex; static constexpr bool is_parametric = true; @@ -75,7 +76,7 @@ public: throw Exception("Scale " + std::to_string(scale) + " is out of bounds", ErrorCodes::ARGUMENT_OUT_OF_BOUND); } - TypeIndex getTypeId() const override { return TypeId; } + TypeIndex getTypeId() const override { return TypeToTypeIndex; } Field getDefault() const override; MutableColumnPtr createColumn() const override; diff --git a/src/DataTypes/DataTypeNumberBase.h b/src/DataTypes/DataTypeNumberBase.h index 59dc26ed13a..01c298a4a33 100644 --- a/src/DataTypes/DataTypeNumberBase.h +++ b/src/DataTypes/DataTypeNumberBase.h @@ -1,5 +1,6 @@ #pragma once +#include #include #include @@ -20,13 +21,13 @@ class DataTypeNumberBase : public IDataType public: static constexpr bool is_parametric = false; static constexpr auto family_name = TypeName; - static constexpr auto type_id = TypeId; + static constexpr auto type_id = TypeToTypeIndex; using FieldType = T; using ColumnType = ColumnVector; const char * getFamilyName() const override { return TypeName.data(); } - TypeIndex getTypeId() const override { return TypeId; } + TypeIndex getTypeId() const override { return TypeToTypeIndex; } Field getDefault() const override; diff --git a/src/DataTypes/DataTypesDecimal.h b/src/DataTypes/DataTypesDecimal.h index 199f7796f02..fb590dd1d4b 100644 --- a/src/DataTypes/DataTypesDecimal.h +++ b/src/DataTypes/DataTypesDecimal.h @@ -38,7 +38,7 @@ public: const char * getFamilyName() const override { return family_name; } std::string doGetName() const override; - TypeIndex getTypeId() const override { return TypeId; } + TypeIndex getTypeId() const override { return TypeToTypeIndex; } bool canBePromoted() const override { return true; } DataTypePtr promoteNumericType() const override; diff --git a/src/DataTypes/FieldToDataType.cpp b/src/DataTypes/FieldToDataType.cpp index c1a8cacd5c2..8ca5ffac7c5 100644 --- a/src/DataTypes/FieldToDataType.cpp +++ b/src/DataTypes/FieldToDataType.cpp @@ -152,4 +152,9 @@ DataTypePtr FieldToDataType::operator() (const AggregateFunctionStateData & x) c return DataTypeFactory::instance().get(name); } +DataTypePtr FieldToDataType::operator()(const bool &) const +{ + return DataTypeFactory::instance().get("Bool"); +} + } diff --git a/src/DataTypes/FieldToDataType.h b/src/DataTypes/FieldToDataType.h index ca83ce868fc..72575c070f5 100644 --- a/src/DataTypes/FieldToDataType.h +++ b/src/DataTypes/FieldToDataType.h @@ -38,6 +38,7 @@ public: DataTypePtr operator() (const DecimalField & x) const; DataTypePtr operator() (const DecimalField & x) const; DataTypePtr operator() (const AggregateFunctionStateData & x) const; + DataTypePtr operator() (const bool & x) const; }; } diff --git a/src/DataTypes/IDataType.h b/src/DataTypes/IDataType.h index 9f3458b1ece..416f759924c 100644 --- a/src/DataTypes/IDataType.h +++ b/src/DataTypes/IDataType.h @@ -1,14 +1,15 @@ #pragma once #include -#include #include #include #include +#include #include #include #include + namespace DB { @@ -475,7 +476,7 @@ template inline bool isColumnedAsDecimalT(const DataType & data_type) { const WhichDataType which(data_type); - return 
(which.isDecimal() || which.isDateTime64()) && which.idx == TypeId; + return (which.isDecimal() || which.isDateTime64()) && which.idx == TypeToTypeIndex; } template diff --git a/src/DataTypes/Serializations/SerializationInfo.cpp b/src/DataTypes/Serializations/SerializationInfo.cpp index 42d3d14b672..22df95fc8f7 100644 --- a/src/DataTypes/Serializations/SerializationInfo.cpp +++ b/src/DataTypes/Serializations/SerializationInfo.cpp @@ -158,6 +158,19 @@ void SerializationInfoByName::add(const SerializationInfoByName & other) } } +void SerializationInfoByName::replaceData(const SerializationInfoByName & other) +{ + for (const auto & [name, new_info] : other) + { + auto & old_info = (*this)[name]; + + if (old_info) + old_info->replaceData(*new_info); + else + old_info = new_info->clone(); + } +} + void SerializationInfoByName::writeJSON(WriteBuffer & out) const { Poco::JSON::Object object; diff --git a/src/DataTypes/Serializations/SerializationInfo.h b/src/DataTypes/Serializations/SerializationInfo.h index f7af5d77217..d83fc16f2f6 100644 --- a/src/DataTypes/Serializations/SerializationInfo.h +++ b/src/DataTypes/Serializations/SerializationInfo.h @@ -89,6 +89,11 @@ public: void add(const Block & block); void add(const SerializationInfoByName & other); + /// Takes data from @other, but keeps current serialization kinds. + /// If column exists in @other infos, but not in current infos, + /// it's cloned to current infos. + void replaceData(const SerializationInfoByName & other); + void writeJSON(WriteBuffer & out) const; void readJSON(ReadBuffer & in); }; diff --git a/src/DataTypes/getLeastSupertype.cpp b/src/DataTypes/getLeastSupertype.cpp index f8d10535be2..22f6a077504 100644 --- a/src/DataTypes/getLeastSupertype.cpp +++ b/src/DataTypes/getLeastSupertype.cpp @@ -530,4 +530,16 @@ DataTypePtr getLeastSupertype(const DataTypes & types) throw Exception(getExceptionMessagePrefix(types), ErrorCodes::NO_COMMON_TYPE); } +DataTypePtr tryGetLeastSupertype(const DataTypes & types) +{ + try + { + return getLeastSupertype(types); + } + catch (...) + { + return nullptr; + } +} + } diff --git a/src/DataTypes/getLeastSupertype.h b/src/DataTypes/getLeastSupertype.h index 57e011a0529..c35ec7d722c 100644 --- a/src/DataTypes/getLeastSupertype.h +++ b/src/DataTypes/getLeastSupertype.h @@ -14,4 +14,7 @@ namespace DB */ DataTypePtr getLeastSupertype(const DataTypes & types); +/// Same as above but return nullptr instead of throwing exception. 
+DataTypePtr tryGetLeastSupertype(const DataTypes & types); + } diff --git a/src/Dictionaries/CMakeLists.txt b/src/Dictionaries/CMakeLists.txt index b1b3d6d55e0..19e82c45cc2 100644 --- a/src/Dictionaries/CMakeLists.txt +++ b/src/Dictionaries/CMakeLists.txt @@ -33,14 +33,11 @@ target_link_libraries(clickhouse_dictionaries string_utils ) -target_link_libraries(clickhouse_dictionaries - PUBLIC - abseil_swiss_tables) +target_link_libraries(clickhouse_dictionaries PUBLIC ch_contrib::abseil_swiss_tables) -if(USE_CASSANDRA) - target_include_directories(clickhouse_dictionaries SYSTEM PRIVATE ${CASSANDRA_INCLUDE_DIR}) +if (TARGET ch_contrib::cassandra) + target_link_libraries(clickhouse_dictionaries PRIVATE ch_contrib::cassandra) endif() add_subdirectory(Embedded) - -target_include_directories(clickhouse_dictionaries SYSTEM PRIVATE ${SPARSEHASH_INCLUDE_DIR}) +target_link_libraries(clickhouse_dictionaries PRIVATE ch_contrib::sparsehash) diff --git a/src/Dictionaries/CacheDictionary.cpp b/src/Dictionaries/CacheDictionary.cpp index c21ea763ac3..cad3e3b8799 100644 --- a/src/Dictionaries/CacheDictionary.cpp +++ b/src/Dictionaries/CacheDictionary.cpp @@ -271,7 +271,6 @@ ColumnUInt8::Ptr CacheDictionary::hasKeys(const Columns & k if (dictionary_key_type == DictionaryKeyType::Complex) dict_struct.validateKeyTypes(key_types); - DictionaryKeysArenaHolder arena_holder; DictionaryKeysExtractor extractor(key_columns, arena_holder.getComplexKeyArena()); const auto keys = extractor.extractAllKeys(); @@ -494,7 +493,8 @@ Pipe CacheDictionary::read(const Names & column_names, size if constexpr (dictionary_key_type == DictionaryKeyType::Simple) { auto keys = cache_storage_ptr->getCachedSimpleKeys(); - key_columns = {ColumnWithTypeAndName(getColumnFromPODArray(keys), std::make_shared(), dict_struct.id->name)}; + auto keys_column = getColumnFromPODArray(std::move(keys)); + key_columns = {ColumnWithTypeAndName(std::move(keys_column), std::make_shared(), dict_struct.id->name)}; } else { diff --git a/src/Dictionaries/CacheDictionaryStorage.h b/src/Dictionaries/CacheDictionaryStorage.h index 5fd1bd420c6..d6d04075a3d 100644 --- a/src/Dictionaries/CacheDictionaryStorage.h +++ b/src/Dictionaries/CacheDictionaryStorage.h @@ -553,6 +553,7 @@ private: ContainerType, ContainerType, ContainerType, + ContainerType, ContainerType, ContainerType, ContainerType, diff --git a/src/Dictionaries/DictionaryHelpers.h b/src/Dictionaries/DictionaryHelpers.h index 1e6a4a5fb44..5c2b6b27afd 100644 --- a/src/Dictionaries/DictionaryHelpers.h +++ b/src/Dictionaries/DictionaryHelpers.h @@ -682,6 +682,15 @@ static ColumnPtr getColumnFromPODArray(const PaddedPODArray & array) return column_vector; } +template +static ColumnPtr getColumnFromPODArray(PaddedPODArray && array) +{ + auto column_vector = ColumnVector::create(); + column_vector->getData() = std::move(array); + + return column_vector; +} + template static ColumnPtr getColumnFromPODArray(const PaddedPODArray & array, size_t start, size_t length) { diff --git a/src/Dictionaries/DictionaryStructure.cpp b/src/Dictionaries/DictionaryStructure.cpp index aca566c9258..2a0e1279050 100644 --- a/src/Dictionaries/DictionaryStructure.cpp +++ b/src/Dictionaries/DictionaryStructure.cpp @@ -1,18 +1,21 @@ -#include "DictionaryStructure.h" +#include + +#include +#include +#include + +#include +#include + +#include + +#include #include #include #include #include #include #include -#include -#include -#include -#include - -#include -#include -#include namespace DB @@ -45,8 +48,8 @@ std::optional 
tryGetAttributeUnderlyingType(TypeIndex i switch (index) /// Special cases which do not map TypeIndex::T -> AttributeUnderlyingType::T { case TypeIndex::Date: return AttributeUnderlyingType::UInt16; + case TypeIndex::Date32: return AttributeUnderlyingType::Int32; case TypeIndex::DateTime: return AttributeUnderlyingType::UInt32; - case TypeIndex::DateTime64: return AttributeUnderlyingType::UInt64; default: break; } diff --git a/src/Dictionaries/DictionaryStructure.h b/src/Dictionaries/DictionaryStructure.h index 3b5164f7f48..9014b09b072 100644 --- a/src/Dictionaries/DictionaryStructure.h +++ b/src/Dictionaries/DictionaryStructure.h @@ -7,12 +7,14 @@ #include +#include + #include +#include #include #include #include -#include -#include + #if defined(__GNUC__) /// GCC mistakenly warns about the names in enum class. @@ -26,7 +28,7 @@ using TypeIndexUnderlying = magic_enum::underlying_type_t; // We need to be able to map TypeIndex -> AttributeUnderlyingType and AttributeUnderlyingType -> real type // The first can be done by defining AttributeUnderlyingType enum values to TypeIndex values and then performing // a enum_cast. -// The second can be achieved by using ReverseTypeId +// The second can be achieved by using TypeIndexToType #define map_item(__T) __T = static_cast(TypeIndex::__T) enum class AttributeUnderlyingType : TypeIndexUnderlying @@ -35,6 +37,7 @@ enum class AttributeUnderlyingType : TypeIndexUnderlying map_item(UInt8), map_item(UInt16), map_item(UInt32), map_item(UInt64), map_item(UInt128), map_item(UInt256), map_item(Float32), map_item(Float64), map_item(Decimal32), map_item(Decimal64), map_item(Decimal128), map_item(Decimal256), + map_item(DateTime64), map_item(UUID), map_item(String), map_item(Array) }; @@ -73,7 +76,7 @@ template struct DictionaryAttributeType { /// Converts @c type to it underlying type e.g. AttributeUnderlyingType::UInt8 -> UInt8 - using AttributeType = ReverseTypeId< + using AttributeType = TypeIndexToType< static_cast( static_cast(type))>; }; diff --git a/src/Dictionaries/Embedded/CMakeLists.txt b/src/Dictionaries/Embedded/CMakeLists.txt index 20c7b3c832a..236111bc801 100644 --- a/src/Dictionaries/Embedded/CMakeLists.txt +++ b/src/Dictionaries/Embedded/CMakeLists.txt @@ -2,4 +2,7 @@ include("${ClickHouse_SOURCE_DIR}/cmake/dbms_glob_sources.cmake") add_headers_and_sources(clickhouse_dictionaries_embedded .) 
add_headers_and_sources(clickhouse_dictionaries_embedded GeodataProviders) add_library(clickhouse_dictionaries_embedded ${clickhouse_dictionaries_embedded_sources}) -target_link_libraries(clickhouse_dictionaries_embedded PRIVATE clickhouse_common_io ${MYSQLXX_LIBRARY}) +target_link_libraries(clickhouse_dictionaries_embedded PRIVATE clickhouse_common_io) +if (TARGET ch::mysqlxx) + target_link_libraries(clickhouse_dictionaries_embedded PRIVATE ch::mysqlxx) +endif() diff --git a/src/Dictionaries/FlatDictionary.cpp b/src/Dictionaries/FlatDictionary.cpp index 5d26ad3ebc2..40cc735557c 100644 --- a/src/Dictionaries/FlatDictionary.cpp +++ b/src/Dictionaries/FlatDictionary.cpp @@ -291,30 +291,52 @@ void FlatDictionary::blockToAttributes(const Block & block) DictionaryKeysArenaHolder arena_holder; DictionaryKeysExtractor keys_extractor({ keys_column }, arena_holder.getComplexKeyArena()); - auto keys = keys_extractor.extractAllKeys(); + size_t keys_size = keys_extractor.getKeysSize(); - HashSet already_processed_keys; + static constexpr size_t key_offset = 1; - size_t key_offset = 1; - for (size_t attribute_index = 0; attribute_index < attributes.size(); ++attribute_index) + size_t attributes_size = attributes.size(); + + if (unlikely(attributes_size == 0)) + { + for (size_t i = 0; i < keys_size; ++i) + { + auto key = keys_extractor.extractCurrentKey(); + + if (unlikely(key >= configuration.max_array_size)) + throw Exception(ErrorCodes::ARGUMENT_OUT_OF_BOUND, + "{}: identifier should be less than {}", + getFullName(), + toString(configuration.max_array_size)); + + if (key >= loaded_keys.size()) + { + const size_t elements_count = key + 1; + loaded_keys.resize(elements_count, false); + } + + loaded_keys[key] = true; + + keys_extractor.rollbackCurrentKey(); + } + + return; + } + + for (size_t attribute_index = 0; attribute_index < attributes_size; ++attribute_index) { const IColumn & attribute_column = *block.safeGetByPosition(attribute_index + key_offset).column; Attribute & attribute = attributes[attribute_index]; - for (size_t i = 0; i < keys.size(); ++i) + for (size_t i = 0; i < keys_size; ++i) { - auto key = keys[i]; - - if (already_processed_keys.find(key) != nullptr) - continue; - - already_processed_keys.insert(key); + auto key = keys_extractor.extractCurrentKey(); setAttributeValue(attribute, key, attribute_column[i]); - ++element_count; + keys_extractor.rollbackCurrentKey(); } - already_processed_keys.clear(); + keys_extractor.reset(); } } @@ -369,6 +391,12 @@ void FlatDictionary::loadData() else updateData(); + element_count = 0; + + size_t loaded_keys_size = loaded_keys.size(); + for (size_t i = 0; i < loaded_keys_size; ++i) + element_count += loaded_keys[i]; + if (configuration.require_nonempty && 0 == element_count) throw Exception(ErrorCodes::DICTIONARY_IS_EMPTY, "{}: dictionary source is empty and 'require_nonempty' property is set.", getFullName()); } @@ -495,21 +523,6 @@ void FlatDictionary::resize(Attribute & attribute, UInt64 key) } } -template -void FlatDictionary::setAttributeValueImpl(Attribute & attribute, UInt64 key, const T & value) -{ - auto & array = std::get>(attribute.container); - array[key] = value; - loaded_keys[key] = true; -} - -template <> -void FlatDictionary::setAttributeValueImpl(Attribute & attribute, UInt64 key, const String & value) -{ - auto arena_value = copyStringInArena(string_arena, value); - setAttributeValueImpl(attribute, key, arena_value); -} - void FlatDictionary::setAttributeValue(Attribute & attribute, const UInt64 key, const Field & value) { auto 
type_call = [&](const auto & dictionary_attribute_type) @@ -520,17 +533,27 @@ void FlatDictionary::setAttributeValue(Attribute & attribute, const UInt64 key, resize(attribute, key); - if (attribute.is_nullable_set) + if (attribute.is_nullable_set && value.isNull()) { - if (value.isNull()) - { - attribute.is_nullable_set->insert(key); - loaded_keys[key] = true; - return; - } + attribute.is_nullable_set->insert(key); + loaded_keys[key] = true; + return; } - setAttributeValueImpl(attribute, key, value.get()); + auto & attribute_value = value.get(); + + auto & container = std::get>(attribute.container); + loaded_keys[key] = true; + + if constexpr (std::is_same_v) + { + auto arena_value = copyStringInArena(string_arena, attribute_value); + container[key] = arena_value; + } + else + { + container[key] = attribute_value; + } }; callOnDictionaryAttributeType(attribute.type, type_call); @@ -547,7 +570,8 @@ Pipe FlatDictionary::read(const Names & column_names, size_t max_block_size, siz if (loaded_keys[key_index]) keys.push_back(key_index); - ColumnsWithTypeAndName key_columns = {ColumnWithTypeAndName(getColumnFromPODArray(keys), std::make_shared(), dict_struct.id->name)}; + auto keys_column = getColumnFromPODArray(std::move(keys)); + ColumnsWithTypeAndName key_columns = {ColumnWithTypeAndName(std::move(keys_column), std::make_shared(), dict_struct.id->name)}; std::shared_ptr dictionary = shared_from_this(); auto coordinator = DictionarySourceCoordinator::create(dictionary, column_names, std::move(key_columns), max_block_size); diff --git a/src/Dictionaries/FlatDictionary.h b/src/Dictionaries/FlatDictionary.h index e8f40ea1d66..2578fef3ecb 100644 --- a/src/Dictionaries/FlatDictionary.h +++ b/src/Dictionaries/FlatDictionary.h @@ -127,6 +127,7 @@ private: ContainerType, ContainerType, ContainerType, + ContainerType, ContainerType, ContainerType, ContainerType, @@ -154,9 +155,6 @@ private: template void resize(Attribute & attribute, UInt64 key); - template - void setAttributeValueImpl(Attribute & attribute, UInt64 key, const T & value); - void setAttributeValue(Attribute & attribute, UInt64 key, const Field & value); const DictionaryStructure dict_struct; diff --git a/src/Dictionaries/HashedArrayDictionary.cpp b/src/Dictionaries/HashedArrayDictionary.cpp index 148aaafb160..e35340c7618 100644 --- a/src/Dictionaries/HashedArrayDictionary.cpp +++ b/src/Dictionaries/HashedArrayDictionary.cpp @@ -158,12 +158,6 @@ ColumnUInt8::Ptr HashedArrayDictionary::hasKeys(const Colum auto result = ColumnUInt8::create(keys_size, false); auto & out = result->getData(); - if (attributes.empty()) - { - query_count.fetch_add(keys_size, std::memory_order_relaxed); - return result; - } - size_t keys_found = 0; for (size_t requested_key_index = 0; requested_key_index < keys_size; ++requested_key_index) @@ -753,9 +747,14 @@ Pipe HashedArrayDictionary::read(const Names & column_names ColumnsWithTypeAndName key_columns; if constexpr (dictionary_key_type == DictionaryKeyType::Simple) - key_columns = {ColumnWithTypeAndName(getColumnFromPODArray(keys), std::make_shared(), dict_struct.id->name)}; + { + auto keys_column = getColumnFromPODArray(std::move(keys)); + key_columns = {ColumnWithTypeAndName(std::move(keys_column), std::make_shared(), dict_struct.id->name)}; + } else + { key_columns = deserializeColumnsWithTypeAndNameFromKeys(dict_struct, keys, 0, keys.size()); + } std::shared_ptr dictionary = shared_from_this(); auto coordinator = DictionarySourceCoordinator::create(dictionary, column_names, std::move(key_columns), 
max_block_size); diff --git a/src/Dictionaries/HashedArrayDictionary.h b/src/Dictionaries/HashedArrayDictionary.h index 80436a3d044..a649fddcc39 100644 --- a/src/Dictionaries/HashedArrayDictionary.h +++ b/src/Dictionaries/HashedArrayDictionary.h @@ -147,6 +147,7 @@ private: AttributeContainerType, AttributeContainerType, AttributeContainerType, + AttributeContainerType, AttributeContainerType, AttributeContainerType, AttributeContainerType, diff --git a/src/Dictionaries/HashedDictionary.cpp b/src/Dictionaries/HashedDictionary.cpp index 7025c771e8f..c83735a6330 100644 --- a/src/Dictionaries/HashedDictionary.cpp +++ b/src/Dictionaries/HashedDictionary.cpp @@ -177,15 +177,25 @@ ColumnUInt8::Ptr HashedDictionary::hasKeys(const Co auto result = ColumnUInt8::create(keys_size, false); auto & out = result->getData(); - if (attributes.empty()) + size_t keys_found = 0; + + if (unlikely(attributes.empty())) { + for (size_t requested_key_index = 0; requested_key_index < keys_size; ++requested_key_index) + { + auto requested_key = extractor.extractCurrentKey(); + out[requested_key_index] = no_attributes_container.find(requested_key) != no_attributes_container.end(); + keys_found += out[requested_key_index]; + extractor.rollbackCurrentKey(); + } + query_count.fetch_add(keys_size, std::memory_order_relaxed); + found_count.fetch_add(keys_found, std::memory_order_relaxed); return result; } const auto & attribute = attributes.front(); bool is_attribute_nullable = attribute.is_nullable_set.has_value(); - size_t keys_found = 0; getAttributeContainer(0, [&](const auto & container) { @@ -423,7 +433,25 @@ void HashedDictionary::blockToAttributes(const Bloc Field column_value_to_insert; - for (size_t attribute_index = 0; attribute_index < attributes.size(); ++attribute_index) + size_t attributes_size = attributes.size(); + + if (unlikely(attributes_size == 0)) + { + for (size_t key_index = 0; key_index < keys_size; ++key_index) + { + auto key = keys_extractor.extractCurrentKey(); + + if constexpr (std::is_same_v) + key = copyStringInArena(string_arena, key); + + no_attributes_container.insert(key); + keys_extractor.rollbackCurrentKey(); + } + + return; + } + + for (size_t attribute_index = 0; attribute_index < attributes_size; ++attribute_index) { const IColumn & attribute_column = *block.safeGetByPosition(skip_keys_size_offset + attribute_index).column; auto & attribute = attributes[attribute_index]; @@ -487,7 +515,21 @@ void HashedDictionary::resize(size_t added_rows) if (unlikely(!added_rows)) return; - for (size_t attribute_index = 0; attribute_index < attributes.size(); ++attribute_index) + size_t attributes_size = attributes.size(); + + if (unlikely(attributes_size == 0)) + { + size_t reserve_size = added_rows + no_attributes_container.size(); + + if constexpr (sparse) + no_attributes_container.resize(reserve_size); + else + no_attributes_container.reserve(reserve_size); + + return; + } + + for (size_t attribute_index = 0; attribute_index < attributes_size; ++attribute_index) { getAttributeContainer(attribute_index, [added_rows](auto & attribute_map) { @@ -570,7 +612,9 @@ void HashedDictionary::loadData() } } else + { resize(block.rows()); + } blockToAttributes(block); } @@ -589,9 +633,10 @@ void HashedDictionary::loadData() template void HashedDictionary::calculateBytesAllocated() { - bytes_allocated += attributes.size() * sizeof(attributes.front()); + size_t attributes_size = attributes.size(); + bytes_allocated += attributes_size * sizeof(attributes.front()); - for (size_t i = 0; i < attributes.size(); 
++i) + for (size_t i = 0; i < attributes_size; ++i) { getAttributeContainer(i, [&](const auto & container) { @@ -622,6 +667,22 @@ void HashedDictionary::calculateBytesAllocated() bytes_allocated = attributes[i].is_nullable_set->getBufferSizeInBytes(); } + if (unlikely(attributes_size == 0)) + { + bytes_allocated += sizeof(no_attributes_container); + + if constexpr (sparse) + { + bytes_allocated += no_attributes_container.size() * (sizeof(KeyType)); + bucket_count = no_attributes_container.bucket_count(); + } + else + { + bytes_allocated += no_attributes_container.getBufferSizeInBytes(); + bucket_count = no_attributes_container.getBufferSizeInCells(); + } + } + bytes_allocated += string_arena.size(); if (update_field_loaded_block) @@ -657,13 +718,30 @@ Pipe HashedDictionary::read(const Names & column_na } }); } + else + { + keys.reserve(no_attributes_container.size()); + + for (const auto & key : no_attributes_container) + { + if constexpr (sparse) + keys.emplace_back(key); + else + keys.emplace_back(key.getKey()); + } + } ColumnsWithTypeAndName key_columns; if constexpr (dictionary_key_type == DictionaryKeyType::Simple) - key_columns = {ColumnWithTypeAndName(getColumnFromPODArray(keys), std::make_shared(), dict_struct.id->name)}; + { + auto keys_column = getColumnFromPODArray(std::move(keys)); + key_columns = {ColumnWithTypeAndName(std::move(keys_column), std::make_shared(), dict_struct.id->name)}; + } else + { key_columns = deserializeColumnsWithTypeAndNameFromKeys(dict_struct, keys, 0, keys.size()); + } std::shared_ptr dictionary = shared_from_this(); auto coordinator = DictionarySourceCoordinator::create(dictionary, column_names, std::move(key_columns), max_block_size); diff --git a/src/Dictionaries/HashedDictionary.h b/src/Dictionaries/HashedDictionary.h index c1761944b14..1ef1c58b67c 100644 --- a/src/Dictionaries/HashedDictionary.h +++ b/src/Dictionaries/HashedDictionary.h @@ -5,6 +5,7 @@ #include #include #include +#include #include #include @@ -120,9 +121,14 @@ private: template using CollectionTypeNonSparse = std::conditional_t< dictionary_key_type == DictionaryKeyType::Simple, - HashMap, + HashMap>, HashMapWithSavedHash>>; + using NoAttributesCollectionTypeNonSparse = std::conditional_t< + dictionary_key_type == DictionaryKeyType::Simple, + HashSet>, + HashSetWithSavedHash>>; + /// Here we use sparse_hash_map with DefaultHash<> for the following reasons: /// /// - DefaultHash<> is used for HashMap @@ -140,9 +146,13 @@ private: google::sparse_hash_map>, google::sparse_hash_map>>; + using NoAttributesCollectionTypeSparse = google::sparse_hash_set>; + template using CollectionType = std::conditional_t, CollectionTypeNonSparse>; + using NoAttributesCollectionType = std::conditional_t; + using NullableSet = HashSet>; struct Attribute final @@ -167,6 +177,7 @@ private: CollectionType, CollectionType, CollectionType, + CollectionType, CollectionType, CollectionType, CollectionType, @@ -214,6 +225,7 @@ private: BlockPtr update_field_loaded_block; Arena string_arena; + NoAttributesCollectionType no_attributes_container; }; extern template class HashedDictionary; diff --git a/src/Dictionaries/IPAddressDictionary.h b/src/Dictionaries/IPAddressDictionary.h index 33a9989a9e5..8dddc988caa 100644 --- a/src/Dictionaries/IPAddressDictionary.h +++ b/src/Dictionaries/IPAddressDictionary.h @@ -114,6 +114,7 @@ private: Decimal64, Decimal128, Decimal256, + DateTime64, Float32, Float64, UUID, @@ -137,6 +138,7 @@ private: ContainerType, ContainerType, ContainerType, + ContainerType, ContainerType, 
ContainerType, ContainerType, diff --git a/src/Dictionaries/RangeHashedDictionary.cpp b/src/Dictionaries/RangeHashedDictionary.cpp index 0d862573b65..979cfce6ce2 100644 --- a/src/Dictionaries/RangeHashedDictionary.cpp +++ b/src/Dictionaries/RangeHashedDictionary.cpp @@ -22,6 +22,11 @@ using RangeStorageType = DB::RangeStorageType; const RangeStorageType RANGE_MIN_NULL_VALUE = std::numeric_limits::max(); const RangeStorageType RANGE_MAX_NULL_VALUE = std::numeric_limits::lowest(); +bool isCorrectDate(const RangeStorageType & date) +{ + return 0 < date && date <= DATE_LUT_MAX_DAY_NUM; +} + // Handle both kinds of null values: explicit nulls of NullableColumn and 'implicit' nulls of Date type. RangeStorageType getColumnIntValueOrDefault(const DB::IColumn & column, size_t index, bool isDate, const RangeStorageType & default_value) { @@ -29,7 +34,7 @@ RangeStorageType getColumnIntValueOrDefault(const DB::IColumn & column, size_t i return default_value; const RangeStorageType result = static_cast(column.getInt(index)); - if (isDate && !DB::Range::isCorrectDate(result)) + if (isDate && !isCorrectDate(result)) return default_value; return result; @@ -57,20 +62,6 @@ namespace ErrorCodes extern const int UNSUPPORTED_METHOD; } -bool Range::isCorrectDate(const RangeStorageType & date) -{ - return 0 < date && date <= DATE_LUT_MAX_DAY_NUM; -} - -bool Range::contains(const RangeStorageType & value) const -{ - return left <= value && value <= right; -} - -static bool operator<(const Range & left, const Range & right) -{ - return std::tie(left.left, left.right) < std::tie(right.left, right.right); -} template RangeHashedDictionary::RangeHashedDictionary( @@ -260,16 +251,8 @@ ColumnUInt8::Ptr RangeHashedDictionary::hasKeys(const Colum if (it) { const auto date = dates[key_index]; - const auto & ranges_and_values = it->getMapped(); - const auto val_it = std::find_if( - std::begin(ranges_and_values), - std::end(ranges_and_values), - [date](const Value & v) - { - return v.range.contains(date); - }); - - out[key_index] = val_it != std::end(ranges_and_values); + const auto & interval_tree = it->getMapped(); + out[key_index] = interval_tree.has(date); keys_found += out[key_index]; } else @@ -324,6 +307,8 @@ void RangeHashedDictionary::loadData() updateData(); } + buildAttributeIntervalTrees(); + if (require_nonempty && 0 == element_count) throw Exception(ErrorCodes::DICTIONARY_IS_EMPTY, "{}: dictionary source is empty and 'require_nonempty' property is set."); @@ -407,30 +392,40 @@ void RangeHashedDictionary::getItemsImpl( if (it) { const auto date = dates[key_index]; - const auto & ranges_and_values = it->getMapped(); - const auto val_it = std::find_if( - std::begin(ranges_and_values), - std::end(ranges_and_values), - [date](const Value & v) - { - return v.range.contains(date); - }); + const auto & interval_tree = it->getMapped(); - if (val_it != std::end(ranges_and_values)) + std::optional min_value; + std::optional min_range; + bool has_interval = false; + + interval_tree.find(date, [&](auto & interval, auto & value) + { + has_interval = true; + + if (min_range && interval < *min_range) + min_range = interval; + else + min_range = interval; + + min_value = value; + + return true; + }); + + if (has_interval) { ++keys_found; - auto & value = val_it->value; if constexpr (is_nullable) { - if (value.has_value()) - set_value(key_index, *value, false); + if (min_value.has_value()) + set_value(key_index, *min_value, false); else set_value(key_index, default_value_extractor[key_index], true); } else { - 
set_value(key_index, *value, false); + set_value(key_index, *min_value, false); } keys_extractor.rollbackCurrentKey(); @@ -542,7 +537,9 @@ void RangeHashedDictionary::blockToAttributes(const Block & if constexpr (std::is_same_v) key = copyStringInArena(string_arena, key); - setAttributeValue(attribute, key, Range{lower_bound, upper_bound}, attribute_column[key_index]); + if (likely(lower_bound <= upper_bound)) + setAttributeValue(attribute, key, RangeInterval{lower_bound, upper_bound}, attribute_column[key_index]); + keys_extractor.rollbackCurrentKey(); } @@ -550,18 +547,38 @@ void RangeHashedDictionary::blockToAttributes(const Block & } } +template +void RangeHashedDictionary::buildAttributeIntervalTrees() +{ + for (auto & attribute : attributes) + { + auto type_call = [&](const auto & dictionary_attribute_type) + { + using Type = std::decay_t; + using AttributeType = typename Type::AttributeType; + using ValueType = DictionaryValueType; + + auto & collection = std::get>(attribute.maps); + for (auto & [_, ranges] : collection) + ranges.build(); + }; + + callOnDictionaryAttributeType(attribute.type, type_call); + } +} + template template -void RangeHashedDictionary::setAttributeValueImpl(Attribute & attribute, KeyType key, const Range & range, const Field & value) +void RangeHashedDictionary::setAttributeValueImpl(Attribute & attribute, KeyType key, const RangeInterval & interval, const Field & value) { using ValueType = std::conditional_t, StringRef, T>; auto & collection = std::get>(attribute.maps); - Value value_to_insert; + std::optional value_to_insert; if (attribute.is_nullable && value.isNull()) { - value_to_insert = { range, {} }; + value_to_insert = std::nullopt; } else { @@ -569,11 +586,11 @@ void RangeHashedDictionary::setAttributeValueImpl(Attribute { const auto & string = value.get(); StringRef string_ref = copyStringInArena(string_arena, string); - value_to_insert = Value{ range, { string_ref }}; + value_to_insert = { string_ref }; } else { - value_to_insert = Value{ range, { value.get() }}; + value_to_insert = { value.get() }; } } @@ -582,33 +599,25 @@ void RangeHashedDictionary::setAttributeValueImpl(Attribute if (it) { auto & values = it->getMapped(); - - const auto insert_it = std::lower_bound( - std::begin(values), - std::end(values), - range, - [](const Value & lhs, const Range & rhs_range) - { - return lhs.range < rhs_range; - }); - - values.insert(insert_it, std::move(value_to_insert)); + values.emplace(interval, std::move(value_to_insert)); } else { - collection.insert({key, Values{std::move(value_to_insert)}}); + Values values; + values.emplace(interval, value_to_insert); + collection.insert({key, std::move(values)}); } } template -void RangeHashedDictionary::setAttributeValue(Attribute & attribute, KeyType key, const Range & range, const Field & value) +void RangeHashedDictionary::setAttributeValue(Attribute & attribute, KeyType key, const RangeInterval & interval, const Field & value) { auto type_call = [&](const auto &dictionary_attribute_type) { using Type = std::decay_t; using AttributeType = typename Type::AttributeType; - setAttributeValueImpl(attribute, key, range, value); + setAttributeValueImpl(attribute, key, interval, value); }; callOnDictionaryAttributeType(attribute.type, type_call); @@ -650,14 +659,15 @@ void RangeHashedDictionary::getKeysAndDates( end_dates.reserve(collection.size()); const bool is_date = isDate(dict_struct.range_min->type); + (void)(is_date); for (const auto & key : collection) { - for (const auto & value : key.getMapped()) + for 
(const auto & [interval, _] : key.getMapped()) { keys.push_back(key.getKey()); - start_dates.push_back(value.range.left); - end_dates.push_back(value.range.right); + start_dates.push_back(interval.left); + end_dates.push_back(interval.right); if constexpr (std::numeric_limits::max() > DATE_LUT_MAX_DAY_NUM) /// Avoid warning about tautological comparison in next line. if (is_date && static_cast(end_dates.back()) > DATE_LUT_MAX_DAY_NUM) @@ -676,7 +686,7 @@ PaddedPODArray RangeHashedDictionary::makeDateKeys( for (size_t i = 0; i < keys.size(); ++i) { - if (Range::isCorrectDate(block_start_dates[i])) + if (isCorrectDate(block_start_dates[i])) keys[i] = block_start_dates[i]; // NOLINT else keys[i] = block_end_dates[i]; // NOLINT @@ -707,19 +717,28 @@ Pipe RangeHashedDictionary::read(const Names & column_names using RangeType = typename LeftDataType::FieldType; PaddedPODArray keys; - PaddedPODArray start_dates; - PaddedPODArray end_dates; - getKeysAndDates(keys, start_dates, end_dates); + PaddedPODArray range_start; + PaddedPODArray range_end; + getKeysAndDates(keys, range_start, range_end); - range_min_column = ColumnWithTypeAndName{getColumnFromPODArray(start_dates), dict_struct.range_min->type, dict_struct.range_min->name}; - range_max_column = ColumnWithTypeAndName{getColumnFromPODArray(end_dates), dict_struct.range_max->type, dict_struct.range_max->name}; + auto date_column = getColumnFromPODArray(makeDateKeys(range_start, range_end)); + + auto range_start_column = getColumnFromPODArray(std::move(range_start)); + range_min_column = ColumnWithTypeAndName{std::move(range_start_column), dict_struct.range_min->type, dict_struct.range_min->name}; + + auto range_end_column = getColumnFromPODArray(std::move(range_end)); + range_max_column = ColumnWithTypeAndName{std::move(range_end_column), dict_struct.range_max->type, dict_struct.range_max->name}; if constexpr (dictionary_key_type == DictionaryKeyType::Simple) - key_columns = {ColumnWithTypeAndName(getColumnFromPODArray(keys), std::make_shared(), dict_struct.id->name)}; + { + auto keys_column = getColumnFromPODArray(std::move(keys)); + key_columns = {ColumnWithTypeAndName(std::move(keys_column), std::make_shared(), dict_struct.id->name)}; + } else + { key_columns = deserializeColumnsWithTypeAndNameFromKeys(dict_struct, keys, 0, keys.size()); + } - auto date_column = getColumnFromPODArray(makeDateKeys(start_dates, end_dates)); key_columns.emplace_back(ColumnWithTypeAndName{std::move(date_column), std::make_shared(), ""}); return true; @@ -761,6 +780,9 @@ void registerDictionaryRangeHashed(DictionaryFactory & factory) "{}: dictionary of layout 'range_hashed' requires .structure.range_min and .structure.range_max", full_name); + if (dict_struct.attributes.empty()) + throw Exception(ErrorCodes::UNSUPPORTED_METHOD, "Empty attributes are not supported for dictionary of layout 'range_hashed'"); + const auto dict_id = StorageID::fromDictionaryConfig(config, config_prefix); const DictionaryLifetime dict_lifetime{config, config_prefix + ".lifetime"}; const bool require_nonempty = config.getBool(config_prefix + ".require_nonempty", false); @@ -784,6 +806,9 @@ void registerDictionaryRangeHashed(DictionaryFactory & factory) "{}: dictionary of layout 'complex_key_range_hashed' requires .structure.range_min and .structure.range_max", full_name); + if (dict_struct.attributes.empty()) + throw Exception(ErrorCodes::UNSUPPORTED_METHOD, "Empty attributes are not supported for dictionary of layout 'complex_key_range_hashed'"); + const auto dict_id = 
StorageID::fromDictionaryConfig(config, config_prefix); const DictionaryLifetime dict_lifetime{config, config_prefix + ".lifetime"}; const bool require_nonempty = config.getBool(config_prefix + ".require_nonempty", false); diff --git a/src/Dictionaries/RangeHashedDictionary.h b/src/Dictionaries/RangeHashedDictionary.h index a9b41a4c4d0..91c714d27e6 100644 --- a/src/Dictionaries/RangeHashedDictionary.h +++ b/src/Dictionaries/RangeHashedDictionary.h @@ -8,26 +8,19 @@ #include #include #include -#include +#include + #include #include #include #include + namespace DB { using RangeStorageType = Int64; -struct Range -{ - RangeStorageType left; - RangeStorageType right; - - static bool isCorrectDate(const RangeStorageType & date); - bool contains(const RangeStorageType & value) const; -}; - template class RangeHashedDictionary final : public IDictionary { @@ -94,22 +87,23 @@ public: Pipe read(const Names & column_names, size_t max_block_size, size_t num_streams) const override; private: - template - struct Value final - { - Range range; - std::optional value; - }; + + using RangeInterval = Interval; template - using Values = std::vector>; + using Values = IntervalMap>; template using CollectionType = std::conditional_t< dictionary_key_type == DictionaryKeyType::Simple, - HashMap>, + HashMap, DefaultHash>, HashMapWithSavedHash, DefaultHash>>; + using NoAttributesCollectionType = std::conditional_t< + dictionary_key_type == DictionaryKeyType::Simple, + HashMap>, + HashMapWithSavedHash>>; + struct Attribute final { public: @@ -133,6 +127,7 @@ private: CollectionType, CollectionType, CollectionType, + CollectionType, CollectionType, CollectionType, CollectionType, @@ -160,10 +155,12 @@ private: void blockToAttributes(const Block & block); - template - void setAttributeValueImpl(Attribute & attribute, KeyType key, const Range & range, const Field & value); + void buildAttributeIntervalTrees(); - void setAttributeValue(Attribute & attribute, KeyType key, const Range & range, const Field & value); + template + void setAttributeValueImpl(Attribute & attribute, KeyType key, const RangeInterval & interval, const Field & value); + + void setAttributeValue(Attribute & attribute, KeyType key, const RangeInterval & interval, const Field & value); template void getKeysAndDates( @@ -198,6 +195,7 @@ private: mutable std::atomic query_count{0}; mutable std::atomic found_count{0}; Arena string_arena; + NoAttributesCollectionType no_attributes_container; }; } diff --git a/src/Dictionaries/RedisDictionarySource.cpp b/src/Dictionaries/RedisDictionarySource.cpp index 24a14d8cc80..a1b406b3424 100644 --- a/src/Dictionaries/RedisDictionarySource.cpp +++ b/src/Dictionaries/RedisDictionarySource.cpp @@ -3,26 +3,6 @@ #include "DictionaryStructure.h" #include "registerDictionaries.h" -namespace DB -{ - -void registerDictionarySourceRedis(DictionarySourceFactory & factory) -{ - auto create_table_source = [=](const DictionaryStructure & dict_struct, - const Poco::Util::AbstractConfiguration & config, - const String & config_prefix, - Block & sample_block, - ContextPtr /* global_context */, - const std::string & /* default_database */, - bool /* created_from_ddl */) -> DictionarySourcePtr { - return std::make_unique(dict_struct, config, config_prefix + ".redis", sample_block); - }; - factory.registerSource("redis", create_table_source); -} - -} - - #include #include #include @@ -33,7 +13,6 @@ void registerDictionarySourceRedis(DictionarySourceFactory & factory) #include "RedisSource.h" - namespace DB { namespace ErrorCodes @@ 
-42,34 +21,64 @@ namespace DB extern const int INVALID_CONFIG_PARAMETER; extern const int INTERNAL_REDIS_ERROR; extern const int LOGICAL_ERROR; + extern const int TIMEOUT_EXCEEDED; } + static RedisStorageType parseStorageType(const String & storage_type_str) + { + if (storage_type_str == "hash_map") + return RedisStorageType::HASH_MAP; + else if (!storage_type_str.empty() && storage_type_str != "simple") + throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER, "Unknown storage type {} for Redis dictionary", storage_type_str); - static const size_t max_block_size = 8192; + return RedisStorageType::SIMPLE; + } + + void registerDictionarySourceRedis(DictionarySourceFactory & factory) + { + auto create_table_source = [=](const DictionaryStructure & dict_struct, + const Poco::Util::AbstractConfiguration & config, + const String & config_prefix, + Block & sample_block, + ContextPtr /* global_context */, + const std::string & /* default_database */, + bool /* created_from_ddl */) -> DictionarySourcePtr { + + auto redis_config_prefix = config_prefix + ".redis"; + RedisDictionarySource::Configuration configuration = + { + .host = config.getString(redis_config_prefix + ".host"), + .port = static_cast(config.getUInt(redis_config_prefix + ".port")), + .db_index = config.getUInt(redis_config_prefix + ".db_index", 0), + .password = config.getString(redis_config_prefix + ".password", ""), + .storage_type = parseStorageType(config.getString(redis_config_prefix + ".storage_type", "")), + .pool_size = config.getUInt(redis_config_prefix + ".pool_size", 16), + }; + + return std::make_unique(dict_struct, configuration, sample_block); + }; + + factory.registerSource("redis", create_table_source); + } + + static constexpr size_t REDIS_MAX_BLOCK_SIZE = DEFAULT_BLOCK_SIZE; + static constexpr size_t REDIS_LOCK_ACQUIRE_TIMEOUT_MS = 5000; RedisDictionarySource::RedisDictionarySource( - const DictionaryStructure & dict_struct_, - const String & host_, - UInt16 port_, - UInt8 db_index_, - const String & password_, - RedisStorageType storage_type_, - const Block & sample_block_) - : dict_struct{dict_struct_} - , host{host_} - , port{port_} - , db_index{db_index_} - , password{password_} - , storage_type{storage_type_} - , sample_block{sample_block_} - , client{std::make_shared(host, port)} + const DictionaryStructure & dict_struct_, + const Configuration & configuration_, + const Block & sample_block_) + : dict_struct{dict_struct_} + , configuration(configuration_) + , pool(std::make_shared(configuration.pool_size)) + , sample_block{sample_block_} { if (dict_struct.attributes.size() != 1) throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER, "Invalid number of non key columns for Redis source: {}, expected 1", DB::toString(dict_struct.attributes.size())); - if (storage_type == RedisStorageType::HASH_MAP) + if (configuration.storage_type == RedisStorageType::HASH_MAP) { if (!dict_struct.key) throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER, @@ -87,61 +96,13 @@ namespace DB key.name, key.type->getName()); } - - if (!password.empty()) - { - RedisCommand command("AUTH"); - command << password; - String reply = client->execute(command); - if (reply != "OK") - throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR, - "Authentication failed with reason {}", - reply); - } - - if (db_index != 0) - { - RedisCommand command("SELECT"); - command << std::to_string(db_index); - String reply = client->execute(command); - if (reply != "OK") - throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR, - "Selecting database with index {} failed with 
reason {}", - DB::toString(db_index), - reply); - } } - - RedisDictionarySource::RedisDictionarySource( - const DictionaryStructure & dict_struct_, - const Poco::Util::AbstractConfiguration & config_, - const String & config_prefix_, - Block & sample_block_) - : RedisDictionarySource( - dict_struct_, - config_.getString(config_prefix_ + ".host"), - config_.getUInt(config_prefix_ + ".port"), - config_.getUInt(config_prefix_ + ".db_index", 0), - config_.getString(config_prefix_ + ".password",""), - parseStorageType(config_.getString(config_prefix_ + ".storage_type", "")), - sample_block_) - { - } - - RedisDictionarySource::RedisDictionarySource(const RedisDictionarySource & other) - : RedisDictionarySource{other.dict_struct, - other.host, - other.port, - other.db_index, - other.password, - other.storage_type, - other.sample_block} + : RedisDictionarySource(other.dict_struct, other.configuration, other.sample_block) { } - RedisDictionarySource::~RedisDictionarySource() = default; static String storageTypeToKeyType(RedisStorageType type) @@ -161,24 +122,25 @@ namespace DB Pipe RedisDictionarySource::loadAll() { - if (!client->isConnected()) - client->connect(host, port); + auto connection = getConnection(); RedisCommand command_for_keys("KEYS"); command_for_keys << "*"; /// Get only keys for specified storage type. - auto all_keys = client->execute(command_for_keys); + auto all_keys = connection->client->execute(command_for_keys); if (all_keys.isNull()) - return Pipe(std::make_shared(client, RedisArray{}, storage_type, sample_block, max_block_size)); + return Pipe(std::make_shared( + std::move(connection), RedisArray{}, + configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE)); RedisArray keys; - auto key_type = storageTypeToKeyType(storage_type); + auto key_type = storageTypeToKeyType(configuration.storage_type); for (const auto & key : all_keys) - if (key_type == client->execute(RedisCommand("TYPE").addRedisType(key))) + if (key_type == connection->client->execute(RedisCommand("TYPE").addRedisType(key))) keys.addRedisType(std::move(key)); - if (storage_type == RedisStorageType::HASH_MAP) + if (configuration.storage_type == RedisStorageType::HASH_MAP) { RedisArray hkeys; for (const auto & key : keys) @@ -186,7 +148,7 @@ namespace DB RedisCommand command_for_secondary_keys("HKEYS"); command_for_secondary_keys.addRedisType(key); - auto secondary_keys = client->execute(command_for_secondary_keys); + auto secondary_keys = connection->client->execute(command_for_secondary_keys); RedisArray primary_with_secondary; primary_with_secondary.addRedisType(key); @@ -194,7 +156,7 @@ namespace DB { primary_with_secondary.addRedisType(secondary_key); /// Do not store more than max_block_size values for one request. 
- if (primary_with_secondary.size() == max_block_size + 1) + if (primary_with_secondary.size() == REDIS_MAX_BLOCK_SIZE + 1) { hkeys.add(primary_with_secondary); primary_with_secondary.clear(); @@ -209,16 +171,16 @@ namespace DB keys = std::move(hkeys); } - return Pipe(std::make_shared(client, std::move(keys), storage_type, sample_block, max_block_size)); + return Pipe(std::make_shared( + std::move(connection), std::move(keys), + configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE)); } - Pipe RedisDictionarySource::loadIds(const std::vector & ids) { - if (!client->isConnected()) - client->connect(host, port); + auto connection = getConnection(); - if (storage_type == RedisStorageType::HASH_MAP) + if (configuration.storage_type == RedisStorageType::HASH_MAP) throw Exception(ErrorCodes::UNSUPPORTED_METHOD, "Cannot use loadIds with 'hash_map' storage type"); if (!dict_struct.id) @@ -229,13 +191,14 @@ namespace DB for (UInt64 id : ids) keys << DB::toString(id); - return Pipe(std::make_shared(client, std::move(keys), storage_type, sample_block, max_block_size)); + return Pipe(std::make_shared( + std::move(connection), std::move(keys), + configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE)); } Pipe RedisDictionarySource::loadKeys(const Columns & key_columns, const std::vector & requested_rows) { - if (!client->isConnected()) - client->connect(host, port); + auto connection = getConnection(); if (key_columns.size() != dict_struct.key->size()) throw Exception(ErrorCodes::LOGICAL_ERROR, "The size of key_columns does not equal to the size of dictionary key"); @@ -250,7 +213,7 @@ namespace DB if (isInteger(type)) key << DB::toString(key_columns[i]->get64(row)); else if (isString(type)) - key << get((*key_columns[i])[row]); + key << get((*key_columns[i])[row]); else throw Exception(ErrorCodes::LOGICAL_ERROR, "Unexpected type of key in Redis dictionary"); } @@ -258,22 +221,65 @@ namespace DB keys.add(key); } - return Pipe(std::make_shared(client, std::move(keys), storage_type, sample_block, max_block_size)); + return Pipe(std::make_shared( + std::move(connection), std::move(keys), + configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE)); } - String RedisDictionarySource::toString() const { - return "Redis: " + host + ':' + DB::toString(port); + return "Redis: " + configuration.host + ':' + DB::toString(configuration.port); } - RedisStorageType RedisDictionarySource::parseStorageType(const String & storage_type_str) + RedisDictionarySource::ConnectionPtr RedisDictionarySource::getConnection() const { - if (storage_type_str == "hash_map") - return RedisStorageType::HASH_MAP; - else if (!storage_type_str.empty() && storage_type_str != "simple") - throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER, "Unknown storage type {} for Redis dictionary", storage_type_str); + ClientPtr client; + bool ok = pool->tryBorrowObject(client, + [] { return std::make_unique(); }, + REDIS_LOCK_ACQUIRE_TIMEOUT_MS); - return RedisStorageType::SIMPLE; + if (!ok) + throw Exception(ErrorCodes::TIMEOUT_EXCEEDED, + "Could not get connection from pool, timeout exceeded {} seconds", + REDIS_LOCK_ACQUIRE_TIMEOUT_MS); + + if (!client->isConnected()) + { + try + { + client->connect(configuration.host, configuration.port); + + if (!configuration.password.empty()) + { + RedisCommand command("AUTH"); + command << configuration.password; + String reply = client->execute(command); + if (reply != "OK") + throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR, + "Authentication failed with reason {}", reply); + } + + 
if (configuration.db_index != 0) + { + RedisCommand command("SELECT"); + command << std::to_string(configuration.db_index); + String reply = client->execute(command); + if (reply != "OK") + throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR, + "Selecting database with index {} failed with reason {}", + configuration.db_index, reply); + } + } + catch (...) + { + if (client->isConnected()) + client->disconnect(); + + pool->returnObject(std::move(client)); + throw; + } + } + + return std::make_unique(pool, std::move(client)); } } diff --git a/src/Dictionaries/RedisDictionarySource.h b/src/Dictionaries/RedisDictionarySource.h index eff97dede0c..af12981f348 100644 --- a/src/Dictionaries/RedisDictionarySource.h +++ b/src/Dictionaries/RedisDictionarySource.h @@ -1,6 +1,7 @@ #pragma once #include +#include #include "DictionaryStructure.h" #include "IDictionarySource.h" @@ -20,13 +21,13 @@ namespace Poco } } - namespace DB { -namespace ErrorCodes -{ - extern const int NOT_IMPLEMENTED; -} + namespace ErrorCodes + { + extern const int NOT_IMPLEMENTED; + } + enum class RedisStorageType { SIMPLE, @@ -36,24 +37,46 @@ namespace ErrorCodes class RedisDictionarySource final : public IDictionarySource { - RedisDictionarySource( - const DictionaryStructure & dict_struct, - const std::string & host, - UInt16 port, - UInt8 db_index, - const std::string & password, - RedisStorageType storage_type, - const Block & sample_block); - public: using RedisArray = Poco::Redis::Array; using RedisCommand = Poco::Redis::Command; + using ClientPtr = std::unique_ptr; + using Pool = BorrowedObjectPool; + using PoolPtr = std::shared_ptr; + + struct Configuration + { + const std::string host; + const UInt16 port; + const UInt32 db_index; + const std::string password; + const RedisStorageType storage_type; + const size_t pool_size; + }; + + struct Connection + { + Connection(PoolPtr pool_, ClientPtr client_) + : pool(std::move(pool_)), client(std::move(client_)) + { + } + + ~Connection() + { + pool->returnObject(std::move(client)); + } + + PoolPtr pool; + ClientPtr client; + }; + + using ConnectionPtr = std::unique_ptr; + RedisDictionarySource( - const DictionaryStructure & dict_struct, - const Poco::Util::AbstractConfiguration & config, - const std::string & config_prefix, - Block & sample_block); + const DictionaryStructure & dict_struct_, + const Configuration & configuration_, + const Block & sample_block_); RedisDictionarySource(const RedisDictionarySource & other); @@ -81,17 +104,12 @@ namespace ErrorCodes std::string toString() const override; private: - static RedisStorageType parseStorageType(const std::string& storage_type); + ConnectionPtr getConnection() const; const DictionaryStructure dict_struct; - const std::string host; - const UInt16 port; - const UInt8 db_index; - const std::string password; - const RedisStorageType storage_type; + const Configuration configuration; + + PoolPtr pool; Block sample_block; - - std::shared_ptr client; }; - } diff --git a/src/Dictionaries/RedisSource.cpp b/src/Dictionaries/RedisSource.cpp index ad5cf8a0977..6089b836d98 100644 --- a/src/Dictionaries/RedisSource.cpp +++ b/src/Dictionaries/RedisSource.cpp @@ -30,20 +30,22 @@ namespace DB RedisSource::RedisSource( - const std::shared_ptr & client_, - const RedisArray & keys_, - const RedisStorageType & storage_type_, - const DB::Block & sample_block, - const size_t max_block_size_) - : SourceWithProgress(sample_block) - , client(client_), keys(keys_), storage_type(storage_type_), max_block_size{max_block_size_} + ConnectionPtr 
connection_, + const RedisArray & keys_, + const RedisStorageType & storage_type_, + const DB::Block & sample_block, + size_t max_block_size_) + : SourceWithProgress(sample_block) + , connection(std::move(connection_)) + , keys(keys_) + , storage_type(storage_type_) + , max_block_size{max_block_size_} { description.init(sample_block); } RedisSource::~RedisSource() = default; - namespace { using ValueType = ExternalResultDescription::ValueType; @@ -121,7 +123,6 @@ namespace DB } } - Chunk RedisSource::generate() { if (keys.isNull() || description.sample_block.rows() == 0 || cursor >= keys.size()) @@ -168,7 +169,7 @@ namespace DB for (const auto & elem : keys_array) command_for_values.addRedisType(elem); - auto values = client->execute(command_for_values); + auto values = connection->client->execute(command_for_values); if (keys_array.size() != values.size() + 1) // 'HMGET' primary_key secondary_keys throw Exception(ErrorCodes::NUMBER_OF_COLUMNS_DOESNT_MATCH, @@ -199,7 +200,7 @@ namespace DB for (size_t i = 0; i < need_values; ++i) command_for_values.add(keys.get(cursor + i)); - auto values = client->execute(command_for_values); + auto values = connection->client->execute(command_for_values); if (values.size() != need_values) throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR, "Inconsistent sizes of keys and values in Redis request"); diff --git a/src/Dictionaries/RedisSource.h b/src/Dictionaries/RedisSource.h index db2e643eb4e..24507998f58 100644 --- a/src/Dictionaries/RedisSource.h +++ b/src/Dictionaries/RedisSource.h @@ -24,13 +24,14 @@ namespace DB public: using RedisArray = Poco::Redis::Array; using RedisBulkString = Poco::Redis::BulkString; + using ConnectionPtr = RedisDictionarySource::ConnectionPtr; RedisSource( - const std::shared_ptr & client_, - const Poco::Redis::Array & keys_, - const RedisStorageType & storage_type_, - const Block & sample_block, - const size_t max_block_size); + ConnectionPtr connection_, + const Poco::Redis::Array & keys_, + const RedisStorageType & storage_type_, + const Block & sample_block, + size_t max_block_size); ~RedisSource() override; @@ -39,7 +40,7 @@ namespace DB private: Chunk generate() override; - std::shared_ptr client; + ConnectionPtr connection; Poco::Redis::Array keys; RedisStorageType storage_type; const size_t max_block_size; diff --git a/src/Dictionaries/XDBCDictionarySource.cpp b/src/Dictionaries/XDBCDictionarySource.cpp index ab7cf65eb8b..e95094cac47 100644 --- a/src/Dictionaries/XDBCDictionarySource.cpp +++ b/src/Dictionaries/XDBCDictionarySource.cpp @@ -18,6 +18,7 @@ #include #include #include +#include namespace DB diff --git a/src/Disks/DiskCacheWrapper.cpp b/src/Disks/DiskCacheWrapper.cpp index f741b8242f5..46ea46f85ef 100644 --- a/src/Disks/DiskCacheWrapper.cpp +++ b/src/Disks/DiskCacheWrapper.cpp @@ -65,8 +65,9 @@ std::shared_ptr DiskCacheWrapper::acquireDownloadMetadata( std::unique_lock lock{mutex}; auto it = file_downloads.find(path); - if (it != file_downloads.end() && !it->second.expired()) - return it->second.lock(); + if (it != file_downloads.end()) + if (auto x = it->second.lock()) + return x; std::shared_ptr metadata( new FileDownloadMetadata, diff --git a/src/Disks/S3/DiskS3.cpp b/src/Disks/S3/DiskS3.cpp index 0bebf91df97..ed960528abe 100644 --- a/src/Disks/S3/DiskS3.cpp +++ b/src/Disks/S3/DiskS3.cpp @@ -999,6 +999,7 @@ void DiskS3::restoreFileOperations(const RestoreInformation & restore_informatio if (metadata_disk->exists(to_path)) metadata_disk->removeRecursive(to_path); + createDirectories(directoryPath(to_path)); 
metadata_disk->moveDirectory(from_path, to_path); } } diff --git a/src/Disks/S3/ProxyResolverConfiguration.cpp b/src/Disks/S3/ProxyResolverConfiguration.cpp index 0fc7a9a1fa8..eeac54163b1 100644 --- a/src/Disks/S3/ProxyResolverConfiguration.cpp +++ b/src/Disks/S3/ProxyResolverConfiguration.cpp @@ -8,6 +8,7 @@ #include #include #include +#include namespace DB::ErrorCodes { @@ -44,13 +45,36 @@ Aws::Client::ClientConfigurationPerRequest ProxyResolverConfiguration::getConfig Poco::Timespan(1000000), /// Send timeout. Poco::Timespan(1000000) /// Receive timeout. ); - auto session = makeHTTPSession(endpoint, timeouts); try { /// It should be just empty GET request. Poco::Net::HTTPRequest request(Poco::Net::HTTPRequest::HTTP_GET, endpoint.getPath(), Poco::Net::HTTPRequest::HTTP_1_1); - session->sendRequest(request); + + const auto & host = endpoint.getHost(); + auto resolved_hosts = DNSResolver::instance().resolveHostAll(host); + + if (resolved_hosts.empty()) + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Proxy resolver cannot resolve host {}", host); + + HTTPSessionPtr session; + + for (size_t i = 0; i < resolved_hosts.size(); ++i) + { + auto resolved_endpoint = endpoint; + resolved_endpoint.setHost(resolved_hosts[i].toString()); + session = makeHTTPSession(endpoint, timeouts, false); + + try + { + session->sendRequest(request); + } + catch (...) + { + if (i + 1 == resolved_hosts.size()) + throw; + } + } Poco::Net::HTTPResponse response; auto & response_body_stream = session->receiveResponse(response); diff --git a/src/Formats/CMakeLists.txt b/src/Formats/CMakeLists.txt index 12def0fb1d0..6e6aa6d4553 100644 --- a/src/Formats/CMakeLists.txt +++ b/src/Formats/CMakeLists.txt @@ -1 +1,21 @@ +if (TARGET ch_contrib::avrocpp) + set(USE_AVRO 1) +endif() +if (TARGET ch_contrib::parquet) + set(USE_PARQUET 1) + set(USE_ARROW 1) + set(USE_ORC 1) +endif() +if (TARGET ch_contrib::snappy) + set(USE_SNAPPY 1) +endif() +if (TARGET ch_contrib::protobuf) + set(USE_PROTOBUF 1) +endif() +if (TARGET ch_contrib::msgpack) + set(USE_MSGPACK 1) +endif() +if (TARGET ch_contrib::capnp) + set(USE_CAPNP 1) +endif() configure_file(config_formats.h.in ${ConfigIncludePath}/config_formats.h) diff --git a/src/Formats/FormatFactory.cpp b/src/Formats/FormatFactory.cpp index e00a473f584..3f220cbb20a 100644 --- a/src/Formats/FormatFactory.cpp +++ b/src/Formats/FormatFactory.cpp @@ -394,6 +394,27 @@ void FormatFactory::registerNonTrivialPrefixAndSuffixChecker(const String & name target = std::move(non_trivial_prefix_and_suffix_checker); } +void FormatFactory::registerAppendSupportChecker(const String & name, AppendSupportChecker append_support_checker) +{ + auto & target = dict[name].append_support_checker; + if (target) + throw Exception("FormatFactory: Suffix checker " + name + " is already registered", ErrorCodes::LOGICAL_ERROR); + target = std::move(append_support_checker); +} + +void FormatFactory::markFormatHasNoAppendSupport(const String & name) +{ + registerAppendSupportChecker(name, [](const FormatSettings &){ return false; }); +} + +bool FormatFactory::checkIfFormatSupportAppend(const String & name, ContextPtr context, const std::optional & format_settings_) +{ + auto format_settings = format_settings_ ? 
*format_settings_ : getFormatSettings(context); + auto & append_support_checker = dict[name].append_support_checker; + /// By default we consider that format supports append + return !append_support_checker || append_support_checker(format_settings); +} + void FormatFactory::registerOutputFormat(const String & name, OutputCreator output_creator) { auto & target = dict[name].output_creator; diff --git a/src/Formats/FormatFactory.h b/src/Formats/FormatFactory.h index a5eaa43a29f..228d5234959 100644 --- a/src/Formats/FormatFactory.h +++ b/src/Formats/FormatFactory.h @@ -93,6 +93,10 @@ private: /// The checker should return true if parallel parsing should be disabled. using NonTrivialPrefixAndSuffixChecker = std::function; + /// Some formats can support append depending on settings. + /// The checker should return true if format support append. + using AppendSupportChecker = std::function; + using SchemaReaderCreator = std::function; using ExternalSchemaReaderCreator = std::function; @@ -106,6 +110,7 @@ private: bool supports_parallel_formatting{false}; bool is_column_oriented{false}; NonTrivialPrefixAndSuffixChecker non_trivial_prefix_and_suffix_checker; + AppendSupportChecker append_support_checker; }; using FormatsDictionary = std::unordered_map; @@ -167,6 +172,14 @@ public: void registerNonTrivialPrefixAndSuffixChecker(const String & name, NonTrivialPrefixAndSuffixChecker non_trivial_prefix_and_suffix_checker); + void registerAppendSupportChecker(const String & name, AppendSupportChecker append_support_checker); + + /// If format always doesn't support append, you can use this method instead of + /// registerAppendSupportChecker with append_support_checker that always returns true. + void markFormatHasNoAppendSupport(const String & name); + + bool checkIfFormatSupportAppend(const String & name, ContextPtr context, const std::optional & format_settings_ = std::nullopt); + /// Register format by its name. 
void registerInputFormat(const String & name, InputCreator input_creator); void registerOutputFormat(const String & name, OutputCreator output_creator); diff --git a/src/Functions/CMakeLists.txt b/src/Functions/CMakeLists.txt index b071e2a9e62..b7020ea128e 100644 --- a/src/Functions/CMakeLists.txt +++ b/src/Functions/CMakeLists.txt @@ -1,3 +1,4 @@ +include(configure_config.cmake) configure_file(config_functions.h.in ${ConfigIncludePath}/config_functions.h) add_subdirectory(divide) @@ -12,30 +13,26 @@ add_library(clickhouse_functions ${clickhouse_functions_sources}) target_link_libraries(clickhouse_functions PUBLIC - ${BASE64_LIBRARY} - ${CITYHASH_LIBRARIES} - ${FARMHASH_LIBRARIES} - ${FASTOPS_LIBRARY} + ch_contrib::cityhash + ch_contrib::farmhash clickhouse_dictionaries clickhouse_dictionaries_embedded clickhouse_parsers - consistent-hashing + ch_contrib::consistent_hashing dbms - metrohash - murmurhash + ch_contrib::metrohash + ch_contrib::murmurhash PRIVATE - ${ZLIB_LIBRARIES} + ch_contrib::zlib boost::filesystem divide_impl ) -if (OPENSSL_CRYPTO_LIBRARY) - target_link_libraries(clickhouse_functions PUBLIC ${OPENSSL_CRYPTO_LIBRARY}) +if (TARGET OpenSSL::Crypto) + target_link_libraries(clickhouse_functions PUBLIC OpenSSL::Crypto) endif() -target_include_directories(clickhouse_functions SYSTEM PRIVATE ${SPARSEHASH_INCLUDE_DIR}) - if (CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE" OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" OR CMAKE_BUILD_TYPE_UC STREQUAL "MINSIZEREL") @@ -57,47 +54,46 @@ else() message(STATUS "Generating debugger info for ClickHouse functions") endif() -if (USE_ICU) - target_link_libraries (clickhouse_functions PRIVATE ${ICU_LIBRARIES}) - target_include_directories(clickhouse_functions SYSTEM PRIVATE ${ICU_INCLUDE_DIRS}) +if (TARGET ch_contrib::icu) + target_link_libraries (clickhouse_functions PRIVATE ch_contrib::icu) endif () -if (USE_FASTOPS) - target_include_directories (clickhouse_functions SYSTEM PRIVATE ${FASTOPS_INCLUDE_DIR}) +if (TARGET ch_contrib::fastops) + target_link_libraries (clickhouse_functions PRIVATE ch_contrib::fastops) endif () if (ENABLE_EXAMPLES) add_subdirectory(examples) endif () -if (USE_EMBEDDED_COMPILER) - target_link_libraries(clickhouse_functions PRIVATE ${REQUIRED_LLVM_LIBRARIES}) - target_include_directories(clickhouse_functions SYSTEM BEFORE PUBLIC ${LLVM_INCLUDE_DIRS}) +if (TARGET ch_contrib::llvm) + target_link_libraries(clickhouse_functions PRIVATE ch_contrib::llvm) endif () -if(USE_BASE64) - target_include_directories(clickhouse_functions SYSTEM PRIVATE ${BASE64_INCLUDE_DIR}) +if (TARGET ch_contrib::base64) + target_link_libraries(clickhouse_functions PRIVATE ch_contrib::base64) endif() -target_link_libraries(clickhouse_functions PRIVATE lz4) +target_link_libraries(clickhouse_functions PRIVATE ch_contrib::lz4) -if (USE_NLP) - target_link_libraries(clickhouse_functions PRIVATE cld2) +if (ENABLE_NLP) + target_link_libraries(clickhouse_functions PRIVATE ch_contrib::cld2) endif() -if (USE_H3) - target_link_libraries(clickhouse_functions PRIVATE ${H3_LIBRARY}) - target_include_directories(clickhouse_functions SYSTEM PRIVATE ${H3_INCLUDE_DIR}) +if (TARGET ch_contrib::h3) + target_link_libraries (clickhouse_functions PRIVATE ch_contrib::h3) endif() -target_link_libraries(clickhouse_functions PRIVATE hyperscan) - -if(USE_SIMDJSON) - target_link_libraries(clickhouse_functions PRIVATE simdjson) +if (TARGET ch_contrib::hyperscan) + target_link_libraries(clickhouse_functions PRIVATE ch_contrib::hyperscan) endif() -if(USE_RAPIDJSON) - 
target_include_directories(clickhouse_functions SYSTEM PRIVATE ${RAPIDJSON_INCLUDE_DIR}) +if (TARGET ch_contrib::simdjson) + target_link_libraries(clickhouse_functions PRIVATE ch_contrib::simdjson) +endif() + +if (TARGET ch_contrib::rapidjson) + target_link_libraries(clickhouse_functions PRIVATE ch_contrib::rapidjson) endif() # ClickHouse developers may use platform-dependent code under some macro (e.g. `#ifdef ENABLE_MULTITARGET`). @@ -125,9 +121,6 @@ add_subdirectory(JSONPath) # Signed integer overflow on user-provided data inside boost::geometry - ignore. set_source_files_properties("pointInPolygon.cpp" PROPERTIES COMPILE_FLAGS -fno-sanitize=signed-integer-overflow) -# target_link_libraries(clickhouse_functions PRIVATE ${S2_LIBRARY}) -target_include_directories(clickhouse_functions SYSTEM PUBLIC ${S2_GEOMETRY_INCLUDE_DIR}) - if (ENABLE_FUZZING) add_compile_definitions(FUZZING_MODE=1) endif () diff --git a/src/Functions/FunctionBase64Conversion.h b/src/Functions/FunctionBase64Conversion.h index 6b1234f6228..a1d6b966660 100644 --- a/src/Functions/FunctionBase64Conversion.h +++ b/src/Functions/FunctionBase64Conversion.h @@ -124,13 +124,26 @@ public: if constexpr (std::is_same_v) { - outlen = _tb64e(reinterpret_cast(source), srclen, reinterpret_cast(dst_pos)); + /* + * Some bug in sse arm64 implementation? + * `base64Encode(repeat('a', 46))` returns wrong padding character + */ +#if defined(__aarch64__) + outlen = tb64senc(reinterpret_cast(source), srclen, reinterpret_cast(dst_pos)); +#else + outlen = _tb64e(reinterpret_cast(source), srclen, reinterpret_cast(dst_pos)); +#endif } else if constexpr (std::is_same_v) { if (srclen > 0) { - outlen = _tb64d(reinterpret_cast(source), srclen, reinterpret_cast(dst_pos)); +#if defined(__aarch64__) + outlen = tb64sdec(reinterpret_cast(source), srclen, reinterpret_cast(dst_pos)); +#else + outlen = _tb64d(reinterpret_cast(source), srclen, reinterpret_cast(dst_pos)); +#endif + if (!outlen) throw Exception("Failed to " + getName() + " input '" + String(reinterpret_cast(source), srclen) + "'", ErrorCodes::INCORRECT_DATA); } diff --git a/src/Functions/FunctionBinaryArithmetic.h b/src/Functions/FunctionBinaryArithmetic.h index a1c3320f88a..4aaaf37e6cf 100644 --- a/src/Functions/FunctionBinaryArithmetic.h +++ b/src/Functions/FunctionBinaryArithmetic.h @@ -139,6 +139,9 @@ public: Case::allow_decimal && IsDataTypeDecimal && IsFloatingPoint, DataTypeFloat64>, + Case::bit_hamming_distance && IsIntegral && IsIntegral, + DataTypeUInt8>, + /// Decimal Real is not supported (traditional DBs convert Decimal Real to Real) Case && !IsIntegralOrExtendedOrDecimal, InvalidType>, Case && !IsIntegralOrExtendedOrDecimal, InvalidType>, diff --git a/src/Functions/FunctionMathUnary.h b/src/Functions/FunctionMathUnary.h index d9ca162ba16..e0b9355e0a6 100644 --- a/src/Functions/FunctionMathUnary.h +++ b/src/Functions/FunctionMathUnary.h @@ -43,16 +43,19 @@ private: DataTypePtr getReturnTypeImpl(const DataTypes & arguments) const override { - const auto & arg = arguments.front(); - if (!isNumber(arg)) - throw Exception{"Illegal type " + arg->getName() + " of argument of function " + getName(), - ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT}; + const auto & argument = arguments.front(); + + if (!isNumber(argument)) + throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT, + "Illegal type {} of argument of function {}", + argument->getName(), + getName()); /// Integers are converted to Float64. 
- if (Impl::always_returns_float64 || !isFloat(arg)) + if (Impl::always_returns_float64 || !isFloat(argument)) return std::make_shared(); else - return arg; + return argument; } template @@ -155,8 +158,10 @@ private: }; if (!callOnBasicType(col.type->getTypeId(), call)) - throw Exception{"Illegal column " + col.column->getName() + " of argument of function " + getName(), - ErrorCodes::ILLEGAL_COLUMN}; + throw Exception(ErrorCodes::ILLEGAL_COLUMN, + "Illegal column {} of argument of function {}", + col.column->getName(), + getName()); return res; } @@ -164,19 +169,17 @@ private: template -struct UnaryFunctionPlain +struct UnaryFunctionVectorized { static constexpr auto name = Name::name; static constexpr auto rows_per_iteration = 1; static constexpr bool always_returns_float64 = true; template - static void execute(const T * src, Float64 * dst) + static void execute(const T * __restrict src, Float64 * __restrict dst) { - dst[0] = static_cast(Function(static_cast(src[0]))); + *dst = Function(static_cast(*src)); } }; -#define UnaryFunctionVectorized UnaryFunctionPlain - } diff --git a/src/Functions/FunctionsBinaryRepr.cpp b/src/Functions/FunctionsBinaryRepr.cpp index 20b2acac88a..b8733cfc644 100644 --- a/src/Functions/FunctionsBinaryRepr.cpp +++ b/src/Functions/FunctionsBinaryRepr.cpp @@ -2,6 +2,7 @@ #include #include #include +#include #include #include #include @@ -42,7 +43,7 @@ struct HexImpl static constexpr size_t word_size = 2; template - static void executeOneUInt(T x, char *& out) + static void executeOneUInt(T x, char *& out, bool skip_leading_zero = true, bool auto_close = true) { bool was_nonzero = false; for (int offset = (sizeof(T) - 1) * 8; offset >= 0; offset -= 8) @@ -50,15 +51,18 @@ struct HexImpl UInt8 byte = x >> offset; /// Skip leading zeros - if (byte == 0 && !was_nonzero && offset) //-V560 + if (byte == 0 && !was_nonzero && offset && skip_leading_zero) //-V560 continue; was_nonzero = true; writeHexByteUppercase(byte, out); out += word_size; } - *out = '\0'; - ++out; + if (auto_close) + { + *out = '\0'; + ++out; + } } static void executeOneString(const UInt8 * pos, const UInt8 * end, char *& out) @@ -130,7 +134,7 @@ struct BinImpl static constexpr size_t word_size = 8; template - static void executeOneUInt(T x, char *& out) + static void executeOneUInt(T x, char *& out, bool skip_leading_zero = true, bool auto_close = true) { bool was_nonzero = false; for (int offset = (sizeof(T) - 1) * 8; offset >= 0; offset -= 8) @@ -138,15 +142,18 @@ struct BinImpl UInt8 byte = x >> offset; /// Skip leading zeros - if (byte == 0 && !was_nonzero && offset) //-V560 + if (byte == 0 && !was_nonzero && offset && skip_leading_zero) //-V560 continue; was_nonzero = true; writeBinByte(byte, out); out += word_size; } - *out = '\0'; - ++out; + if (auto_close) + { + *out = '\0'; + ++out; + } } template @@ -275,6 +282,7 @@ public: !which.isUInt() && !which.isFloat() && !which.isDecimal() && + !which.isUUID() && !which.isAggregateFunction()) throw Exception("Illegal type " + arguments[0]->getName() + " of argument of function " + getName(), ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT); @@ -306,7 +314,8 @@ public: tryExecuteFloat(column, res_column) || tryExecuteDecimal(column, res_column) || tryExecuteDecimal(column, res_column) || - tryExecuteDecimal(column, res_column)) + tryExecuteDecimal(column, res_column) || + tryExecuteUUID(column, res_column)) return res_column; throw Exception("Illegal column " + arguments[0].column->getName() @@ -480,6 +489,54 @@ public: return false; } } + + bool 
tryExecuteUUID(const IColumn * col, ColumnPtr & col_res) const + { + const ColumnUUID * col_vec = checkAndGetColumn(col); + + static constexpr size_t MAX_LENGTH = sizeof(UUID) * word_size + 1; /// Including trailing zero byte. + + if (col_vec) + { + auto col_str = ColumnString::create(); + ColumnString::Chars & out_vec = col_str->getChars(); + ColumnString::Offsets & out_offsets = col_str->getOffsets(); + + const typename ColumnUUID::Container & in_vec = col_vec->getData(); + const UUID* uuid = in_vec.data(); + + size_t size = in_vec.size(); + out_offsets.resize(size); + out_vec.resize(size * (word_size+1) + MAX_LENGTH); /// word_size+1 is length of one byte in hex/bin plus zero byte. + + size_t pos = 0; + for (size_t i = 0; i < size; ++i) + { + /// Manual exponential growth, so as not to rely on the linear amortized work time of `resize` (no one guarantees it). + if (pos + MAX_LENGTH > out_vec.size()) + out_vec.resize(out_vec.size() * word_size + MAX_LENGTH); + + char * begin = reinterpret_cast(&out_vec[pos]); + char * end = begin; + + // use executeOnUInt instead of using executeOneString + // because the latter one outputs the string in the memory order + Impl::executeOneUInt(uuid[i].toUnderType().items[0], end, false, false); + Impl::executeOneUInt(uuid[i].toUnderType().items[1], end, false, true); + + pos += end - begin; + out_offsets[i] = pos; + } + out_vec.resize(pos); + + col_res = std::move(col_str); + return true; + } + else + { + return false; + } + } }; /// Decode number or string from string with binary or hexadecimal representation diff --git a/src/Functions/FunctionsConversion.cpp b/src/Functions/FunctionsConversion.cpp index f32d5df8a21..4f5f6ae483f 100644 --- a/src/Functions/FunctionsConversion.cpp +++ b/src/Functions/FunctionsConversion.cpp @@ -32,7 +32,7 @@ void registerFunctionsConversion(FunctionFactory & factory) factory.registerFunction(); factory.registerFunction(); - /// MysQL compatibility alias. + /// MySQL compatibility alias. 
factory.registerFunction("DATE", FunctionFactory::CaseInsensitive); factory.registerFunction(); factory.registerFunction(); diff --git a/src/Functions/FunctionsConversion.h b/src/Functions/FunctionsConversion.h index 62e62b5f5dc..de0f9c6accb 100644 --- a/src/Functions/FunctionsConversion.h +++ b/src/Functions/FunctionsConversion.h @@ -185,6 +185,15 @@ struct ConvertImpl bool result_is_bool = isBool(result_type); for (size_t i = 0; i < input_rows_count; ++i) { + if constexpr (std::is_same_v) + { + if (result_is_bool) + { + vec_to[i] = vec_from[i] != FromFieldType(0); + continue; + } + } + if constexpr (std::is_same_v != std::is_same_v) { throw Exception("Conversion between numeric types and UUID is not supported", ErrorCodes::NOT_IMPLEMENTED); @@ -269,12 +278,6 @@ struct ConvertImpl vec_to[i] = static_cast(vec_from[i]); } } - - if constexpr (std::is_same_v) - { - if (result_is_bool) - vec_to[i] = static_cast(vec_to[i]); - } } } @@ -2802,10 +2805,16 @@ private: } const auto * from_type = checkAndGetDataType(from_type_untyped.get()); + const auto * from_type_map = checkAndGetDataType(from_type_untyped.get()); + + /// Convert from Map + if (from_type_map) + from_type = checkAndGetDataType(from_type_map->getNestedType().get()); + if (!from_type) { throw Exception(ErrorCodes::TYPE_MISMATCH, - "CAST AS Array can only be performed between same-dimensional Array or String types"); + "CAST AS Array can only be performed between same-dimensional Array, Map or String types"); } DataTypePtr from_nested_type = from_type->getNestedType(); @@ -2825,9 +2834,16 @@ private: return [nested_function, from_nested_type, to_nested_type]( ColumnsWithTypeAndName & arguments, const DataTypePtr &, const ColumnNullable * nullable_source, size_t /*input_rows_count*/) -> ColumnPtr { - const auto & array_arg = arguments.front(); + const auto & argument_column = arguments.front(); - if (const ColumnArray * col_array = checkAndGetColumn(array_arg.column.get())) + const ColumnArray * col_array = nullptr; + + if (const ColumnMap * col_map = checkAndGetColumn(argument_column.column.get())) + col_array = &col_map->getNestedColumn(); + else + col_array = checkAndGetColumn(argument_column.column.get()); + + if (col_array) { /// create columns for converting nested column containing original and result columns ColumnsWithTypeAndName nested_columns{{ col_array->getDataPtr(), from_nested_type, "" }}; @@ -2839,7 +2855,11 @@ private: return ColumnArray::create(result_column, col_array->getOffsetsPtr()); } else - throw Exception{"Illegal column " + array_arg.column->getName() + " for function CAST AS Array", ErrorCodes::LOGICAL_ERROR}; + { + throw Exception(ErrorCodes::LOGICAL_ERROR, + "Illegal column {} for function CAST AS Array", + argument_column.column->getName()); + } }; } diff --git a/src/Functions/FunctionsHashing.cpp b/src/Functions/FunctionsHashing.cpp index 3f334e9c302..cbafd4bcec2 100644 --- a/src/Functions/FunctionsHashing.cpp +++ b/src/Functions/FunctionsHashing.cpp @@ -37,9 +37,7 @@ void registerFunctionsHashing(FunctionFactory & factory) factory.registerFunction(); factory.registerFunction(); -#if USE_XXHASH factory.registerFunction(); factory.registerFunction(); -#endif } } diff --git a/src/Functions/FunctionsHashing.h b/src/Functions/FunctionsHashing.h index a42e6b0bf65..88a0e9524b3 100644 --- a/src/Functions/FunctionsHashing.h +++ b/src/Functions/FunctionsHashing.h @@ -12,10 +12,7 @@ #include #include #include - -#if USE_XXHASH -# include -#endif +#include #if USE_SSL # include @@ -551,9 +548,6 @@ struct 
ImplMetroHash64 static constexpr bool use_int_hash_for_pods = true; }; - -#if USE_XXHASH - struct ImplXxHash32 { static constexpr auto name = "xxHash32"; @@ -574,7 +568,6 @@ struct ImplXxHash32 static constexpr bool use_int_hash_for_pods = false; }; - struct ImplXxHash64 { static constexpr auto name = "xxHash64"; @@ -592,9 +585,6 @@ struct ImplXxHash64 static constexpr bool use_int_hash_for_pods = false; }; -#endif - - template class FunctionStringHashFixedString : public IFunction { @@ -1413,9 +1403,7 @@ using FunctionJavaHash = FunctionAnyHash; using FunctionJavaHashUTF16LE = FunctionAnyHash; using FunctionHiveHash = FunctionAnyHash; -#if USE_XXHASH - using FunctionXxHash32 = FunctionAnyHash; - using FunctionXxHash64 = FunctionAnyHash; -#endif +using FunctionXxHash32 = FunctionAnyHash; +using FunctionXxHash64 = FunctionAnyHash; } diff --git a/src/Functions/FunctionsLogical.cpp b/src/Functions/FunctionsLogical.cpp index 87a2ecd4c57..0dee048dae3 100644 --- a/src/Functions/FunctionsLogical.cpp +++ b/src/Functions/FunctionsLogical.cpp @@ -11,6 +11,7 @@ #include #include #include +#include #include #include @@ -483,10 +484,14 @@ DataTypePtr FunctionAnyArityLogical::getReturnTypeImpl(const DataTyp ErrorCodes::TOO_FEW_ARGUMENTS_FOR_FUNCTION); bool has_nullable_arguments = false; + bool has_bool_arguments = false; for (size_t i = 0; i < arguments.size(); ++i) { const auto & arg_type = arguments[i]; + if (isBool(arg_type)) + has_bool_arguments = true; + if (!has_nullable_arguments) { has_nullable_arguments = arg_type->isNullable(); @@ -503,7 +508,7 @@ DataTypePtr FunctionAnyArityLogical::getReturnTypeImpl(const DataTyp ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT); } - auto result_type = std::make_shared(); + auto result_type = has_bool_arguments ? DataTypeFactory::instance().get("Bool") : std::make_shared(); return has_nullable_arguments ? makeNullable(result_type) : result_type; @@ -711,7 +716,7 @@ DataTypePtr FunctionUnaryLogical::getReturnTypeImpl(const DataTypes + ") of argument of function " + getName(), ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT); - return std::make_shared(); + return isBool(arguments[0]) ? DataTypeFactory::instance().get("Bool") : std::make_shared(); } template