Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-12-02 12:32:04 +00:00

Commit fe70819465: Merge branch 'master' of github.com:ClickHouse/ClickHouse into ADQM-1070
.github/workflows/backport_branches.yml (vendored; 3 lines changed)

@@ -3,6 +3,9 @@ name: BackportPR
 env:
   # Force the stdout and stderr streams to be unbuffered
   PYTHONUNBUFFERED: 1
+  # Export system tables to ClickHouse Cloud
+  CLICKHOUSE_CI_LOGS_HOST: ${{ secrets.CLICKHOUSE_CI_LOGS_HOST }}
+  CLICKHOUSE_CI_LOGS_PASSWORD: ${{ secrets.CLICKHOUSE_CI_LOGS_PASSWORD }}

 on: # yamllint disable-line rule:truthy
   push:
.github/workflows/master.yml (vendored; 46 lines changed)

@@ -3,6 +3,9 @@ name: MasterCI
 env:
   # Force the stdout and stderr streams to be unbuffered
   PYTHONUNBUFFERED: 1
+  # Export system tables to ClickHouse Cloud
+  CLICKHOUSE_CI_LOGS_HOST: ${{ secrets.CLICKHOUSE_CI_LOGS_HOST }}
+  CLICKHOUSE_CI_LOGS_PASSWORD: ${{ secrets.CLICKHOUSE_CI_LOGS_PASSWORD }}

 on: # yamllint disable-line rule:truthy
   push:
@@ -892,6 +895,48 @@ jobs:
           docker ps --quiet | xargs --no-run-if-empty docker kill ||:
           docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+  BuilderBinS390X:
+    needs: [DockerHubPush]
+    runs-on: [self-hosted, builder]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/build_check
+          IMAGES_PATH=${{runner.temp}}/images_path
+          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+          CACHES_PATH=${{runner.temp}}/../ccaches
+          BUILD_NAME=binary_s390x
+          EOF
+      - name: Download changed images
+        uses: actions/download-artifact@v3
+        with:
+          name: changed_images
+          path: ${{ env.IMAGES_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          submodules: true
+          fetch-depth: 0 # otherwise we will have no info about contributors
+      - name: Build
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
+      - name: Upload build URLs to artifacts
+        if: ${{ success() || failure() }}
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ env.BUILD_URLS }}
+          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
 ############################################################################################
 ##################################### Docker images #######################################
 ############################################################################################
@@ -975,6 +1020,7 @@ jobs:
       - BuilderBinFreeBSD
       - BuilderBinPPC64
       - BuilderBinRISCV64
+      - BuilderBinS390X
       - BuilderBinAmd64Compat
       - BuilderBinAarch64V80Compat
       - BuilderBinClangTidy
.github/workflows/pull_request.yml (vendored; 81 lines changed)

@@ -3,6 +3,9 @@ name: PullRequestCI
 env:
   # Force the stdout and stderr streams to be unbuffered
   PYTHONUNBUFFERED: 1
+  # Export system tables to ClickHouse Cloud
+  CLICKHOUSE_CI_LOGS_HOST: ${{ secrets.CLICKHOUSE_CI_LOGS_HOST }}
+  CLICKHOUSE_CI_LOGS_PASSWORD: ${{ secrets.CLICKHOUSE_CI_LOGS_PASSWORD }}

 on: # yamllint disable-line rule:truthy
   pull_request:
@@ -952,6 +955,47 @@ jobs:
           docker ps --quiet | xargs --no-run-if-empty docker kill ||:
           docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+  BuilderBinS390X:
+    needs: [DockerHubPush, FastTest, StyleCheck]
+    runs-on: [self-hosted, builder]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/build_check
+          IMAGES_PATH=${{runner.temp}}/images_path
+          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+          CACHES_PATH=${{runner.temp}}/../ccaches
+          BUILD_NAME=binary_s390x
+          EOF
+      - name: Download changed images
+        uses: actions/download-artifact@v3
+        with:
+          name: changed_images
+          path: ${{ env.IMAGES_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+          submodules: true
+      - name: Build
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
+      - name: Upload build URLs to artifacts
+        if: ${{ success() || failure() }}
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ env.BUILD_URLS }}
+          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
 ############################################################################################
 ##################################### Docker images #######################################
 ############################################################################################
@@ -1034,6 +1078,7 @@ jobs:
       - BuilderBinFreeBSD
       - BuilderBinPPC64
       - BuilderBinRISCV64
+      - BuilderBinS390X
      - BuilderBinAmd64Compat
      - BuilderBinAarch64V80Compat
      - BuilderBinClangTidy
@@ -5182,3 +5227,39 @@ jobs:
           docker ps --quiet | xargs --no-run-if-empty docker kill ||:
           docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH"
+##############################################################################################
+##################################### SQL TEST ###############################################
+##############################################################################################
+  SQLTest:
+    needs: [BuilderDebRelease]
+    runs-on: [self-hosted, fuzzer-unit-tester]
+    steps:
+      - name: Set envs
+        run: |
+          cat >> "$GITHUB_ENV" << 'EOF'
+          TEMP_PATH=${{runner.temp}}/sqltest
+          REPORTS_PATH=${{runner.temp}}/reports_dir
+          CHECK_NAME=SQLTest
+          REPO_COPY=${{runner.temp}}/sqltest/ClickHouse
+          EOF
+      - name: Download json reports
+        uses: actions/download-artifact@v3
+        with:
+          path: ${{ env.REPORTS_PATH }}
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+        with:
+          clear-repository: true
+      - name: SQLTest
+        run: |
+          sudo rm -fr "$TEMP_PATH"
+          mkdir -p "$TEMP_PATH"
+          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+          cd "$REPO_COPY/tests/ci"
+          python3 sqltest.py "$CHECK_NAME"
+      - name: Cleanup
+        if: always()
+        run: |
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
+          sudo rm -fr "$TEMP_PATH"
.github/workflows/release_branches.yml (vendored; 3 lines changed)

@@ -3,6 +3,9 @@ name: ReleaseBranchCI
 env:
   # Force the stdout and stderr streams to be unbuffered
   PYTHONUNBUFFERED: 1
+  # Export system tables to ClickHouse Cloud
+  CLICKHOUSE_CI_LOGS_HOST: ${{ secrets.CLICKHOUSE_CI_LOGS_HOST }}
+  CLICKHOUSE_CI_LOGS_PASSWORD: ${{ secrets.CLICKHOUSE_CI_LOGS_PASSWORD }}

 on: # yamllint disable-line rule:truthy
   push:
.gitmodules (vendored; 16 lines changed)

@@ -331,6 +331,10 @@
 [submodule "contrib/liburing"]
     path = contrib/liburing
     url = https://github.com/axboe/liburing
+[submodule "contrib/libarchive"]
+    path = contrib/libarchive
+    url = https://github.com/libarchive/libarchive.git
+    ignore = dirty
 [submodule "contrib/libfiu"]
     path = contrib/libfiu
     url = https://github.com/ClickHouse/libfiu.git
@@ -343,3 +347,15 @@
 [submodule "contrib/incbin"]
     path = contrib/incbin
     url = https://github.com/graphitemaster/incbin.git
+[submodule "contrib/usearch"]
+    path = contrib/usearch
+    url = https://github.com/unum-cloud/usearch.git
+[submodule "contrib/SimSIMD"]
+    path = contrib/SimSIMD
+    url = https://github.com/ashvardanian/SimSIMD.git
+[submodule "contrib/FP16"]
+    path = contrib/FP16
+    url = https://github.com/Maratyszcza/FP16.git
+[submodule "contrib/robin-map"]
+    path = contrib/robin-map
+    url = https://github.com/Tessil/robin-map.git
@@ -52,7 +52,6 @@
 * Add new setting `disable_url_encoding` that allows to disable decoding/encoding path in uri in URL engine. [#52337](https://github.com/ClickHouse/ClickHouse/pull/52337) ([Kruglov Pavel](https://github.com/Avogar)).

 #### Performance Improvement
-* Writing parquet files is 10x faster, it's multi-threaded now. Almost the same speed as reading. [#49367](https://github.com/ClickHouse/ClickHouse/pull/49367) ([Michael Kolupaev](https://github.com/al13n321)).
 * Enable automatic selection of the sparse serialization format by default. It improves performance. The format is supported since version 22.1. After this change, downgrading to versions older than 22.1 might not be possible. You can turn off the usage of the sparse serialization format by providing the `ratio_of_defaults_for_sparse_serialization = 1` setting for your MergeTree tables. [#49631](https://github.com/ClickHouse/ClickHouse/pull/49631) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
 * Enable `move_all_conditions_to_prewhere` and `enable_multiple_prewhere_read_steps` settings by default. [#46365](https://github.com/ClickHouse/ClickHouse/pull/46365) ([Alexander Gololobov](https://github.com/davenger)).
 * Improves performance of some queries by tuning allocator. [#46416](https://github.com/ClickHouse/ClickHouse/pull/46416) ([Azat Khuzhin](https://github.com/azat)).
@@ -114,6 +113,7 @@
 * Now interserver port will be closed only after tables are shut down. [#52498](https://github.com/ClickHouse/ClickHouse/pull/52498) ([alesapin](https://github.com/alesapin)).

 #### Experimental Feature
+* Writing parquet files is 10x faster, it's multi-threaded now. Almost the same speed as reading. [#49367](https://github.com/ClickHouse/ClickHouse/pull/49367) ([Michael Kolupaev](https://github.com/al13n321)). This is controlled by the setting `output_format_parquet_use_custom_encoder` which is disabled by default, because the feature is non-ideal.
 * Added support for [PRQL](https://prql-lang.org/) as a query language. [#50686](https://github.com/ClickHouse/ClickHouse/pull/50686) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
 * Allow to add disk name for custom disks. Previously custom disks would use an internal generated disk name. Now it will be possible with `disk = disk_<name>(...)` (e.g. disk will have name `name`) . [#51552](https://github.com/ClickHouse/ClickHouse/pull/51552) ([Kseniia Sumarokova](https://github.com/kssenii)). This syntax can be changed in this release.
 * (experimental MaterializedMySQL) Fixed crash when `mysqlxx::Pool::Entry` is used after it was disconnected. [#52063](https://github.com/ClickHouse/ClickHouse/pull/52063) ([Val Doroshchuk](https://github.com/valbok)).
@@ -208,9 +208,6 @@ option(OMIT_HEAVY_DEBUG_SYMBOLS
     "Do not generate debugger info for heavy modules (ClickHouse functions and dictionaries, some contrib)"
     ${OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT})

-if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
-    set(USE_DEBUG_HELPERS ON)
-endif()
 option(USE_DEBUG_HELPERS "Enable debug helpers" ${USE_DEBUG_HELPERS})

 option(BUILD_STANDALONE_KEEPER "Build keeper as small standalone binary" OFF)
@@ -23,11 +23,8 @@ curl https://clickhouse.com/ | sh

 ## Upcoming Events

-* [**v23.7 Release Webinar**](https://clickhouse.com/company/events/v23-7-community-release-call?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-07) - Jul 27 - 23.7 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
-* [**ClickHouse Meetup in Boston**](https://www.meetup.com/clickhouse-boston-user-group/events/293913596) - Jul 18
-* [**ClickHouse Meetup in NYC**](https://www.meetup.com/clickhouse-new-york-user-group/events/293913441) - Jul 19
-* [**ClickHouse Meetup in Toronto**](https://www.meetup.com/clickhouse-toronto-user-group/events/294183127) - Jul 20
-* [**ClickHouse Meetup in Singapore**](https://www.meetup.com/clickhouse-singapore-meetup-group/events/294428050/) - Jul 27
+* [**v23.8 Community Call**](https://clickhouse.com/company/events/v23-8-community-release-call?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-08) - Aug 31 - 23.8 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
+* [**ClickHouse & AI - A Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/294472987) - Aug 8
 * [**ClickHouse Meetup in Paris**](https://www.meetup.com/clickhouse-france-user-group/events/294283460) - Sep 12

 Also, keep an eye out for upcoming meetups around the world. Somewhere else you want us to be? Please feel free to reach out to tyler <at> clickhouse <dot> com.
@@ -3,6 +3,7 @@
 #include <magic_enum.hpp>
 #include <fmt/format.h>
+

 template <class T> concept is_enum = std::is_enum_v<T>;

 namespace detail
@@ -7,8 +7,6 @@
 #include <base/find_symbols.h>
 #include <base/preciseExp10.h>

-#include <iostream>
-
 #define JSON_MAX_DEPTH 100

@@ -8,8 +8,10 @@
 #include <functional>
 #include <iosfwd>

+#include <base/defines.h>
 #include <base/types.h>
 #include <base/unaligned.h>
+#include <base/simd.h>

 #include <city.h>
@@ -28,6 +30,11 @@
 #define CRC_INT __crc32cd
 #endif

+#if defined(__aarch64__) && defined(__ARM_NEON)
+#include <arm_neon.h>
+#pragma clang diagnostic ignored "-Wreserved-identifier"
+#endif
+
 /**
  * The std::string_view-like container to avoid creating strings to find substrings in the hash table.
@@ -73,14 +80,14 @@ using StringRefs = std::vector<StringRef>;
  * For more information, see hash_map_string_2.cpp
  */

-inline bool compareSSE2(const char * p1, const char * p2)
+inline bool compare8(const char * p1, const char * p2)
 {
     return 0xFFFF == _mm_movemask_epi8(_mm_cmpeq_epi8(
         _mm_loadu_si128(reinterpret_cast<const __m128i *>(p1)),
         _mm_loadu_si128(reinterpret_cast<const __m128i *>(p2))));
 }

-inline bool compareSSE2x4(const char * p1, const char * p2)
+inline bool compare64(const char * p1, const char * p2)
 {
     return 0xFFFF == _mm_movemask_epi8(
         _mm_and_si128(
@@ -100,7 +107,30 @@ inline bool compareSSE2x4(const char * p1, const char * p2)
         _mm_loadu_si128(reinterpret_cast<const __m128i *>(p2) + 3)))));
 }

-inline bool memequalSSE2Wide(const char * p1, const char * p2, size_t size)
+#elif defined(__aarch64__) && defined(__ARM_NEON)
+
+inline bool compare8(const char * p1, const char * p2)
+{
+    uint64_t mask = getNibbleMask(vceqq_u8(
+        vld1q_u8(reinterpret_cast<const unsigned char *>(p1)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2))));
+    return 0xFFFFFFFFFFFFFFFF == mask;
+}
+
+inline bool compare64(const char * p1, const char * p2)
+{
+    uint64_t mask = getNibbleMask(vandq_u8(
+        vandq_u8(vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2))),
+            vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1 + 16)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2 + 16)))),
+        vandq_u8(vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1 + 32)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2 + 32))),
+            vceqq_u8(vld1q_u8(reinterpret_cast<const unsigned char *>(p1 + 48)), vld1q_u8(reinterpret_cast<const unsigned char *>(p2 + 48))))));
+    return 0xFFFFFFFFFFFFFFFF == mask;
+}
+
+#endif
+
+#if defined(__SSE2__) || (defined(__aarch64__) && defined(__ARM_NEON))
+
+inline bool memequalWide(const char * p1, const char * p2, size_t size)
 {
     /** The order of branches and the trick with overlapping comparisons
      * are the same as in memcpy implementation.
@@ -137,7 +167,7 @@ inline bool memequalSSE2Wide(const char * p1, const char * p2, size_t size)

     while (size >= 64)
     {
-        if (compareSSE2x4(p1, p2))
+        if (compare64(p1, p2))
         {
             p1 += 64;
             p2 += 64;
@@ -149,17 +179,16 @@ inline bool memequalSSE2Wide(const char * p1, const char * p2, size_t size)

     switch (size / 16)
     {
-        case 3: if (!compareSSE2(p1 + 32, p2 + 32)) return false; [[fallthrough]];
-        case 2: if (!compareSSE2(p1 + 16, p2 + 16)) return false; [[fallthrough]];
-        case 1: if (!compareSSE2(p1, p2)) return false;
+        case 3: if (!compare8(p1 + 32, p2 + 32)) return false; [[fallthrough]];
+        case 2: if (!compare8(p1 + 16, p2 + 16)) return false; [[fallthrough]];
+        case 1: if (!compare8(p1, p2)) return false;
     }

-    return compareSSE2(p1 + size - 16, p2 + size - 16);
+    return compare8(p1 + size - 16, p2 + size - 16);
 }

 #endif

 inline bool operator== (StringRef lhs, StringRef rhs)
 {
     if (lhs.size != rhs.size)
@@ -168,8 +197,8 @@ inline bool operator== (StringRef lhs, StringRef rhs)
     if (lhs.size == 0)
         return true;

-#if defined(__SSE2__)
-    return memequalSSE2Wide(lhs.data, rhs.data, lhs.size);
+#if defined(__SSE2__) || (defined(__aarch64__) && defined(__ARM_NEON))
+    return memequalWide(lhs.data, rhs.data, lhs.size);
 #else
     return 0 == memcmp(lhs.data, rhs.data, lhs.size);
 #endif
@@ -274,6 +303,8 @@ struct CRC32Hash
         if (size == 0)
             return 0;

+        chassert(pos);
+
         if (size < 8)
         {
             return static_cast<unsigned>(hashLessThan8(x.data, x.size));
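As an aside, a minimal scalar model of the dispatch in memequalWide may help when reading the hunk above: the control flow (64-byte blocks, then up to three 16-byte compares, then one overlapping 16-byte compare for the tail) follows the diff, while the SIMD helpers are replaced with memcmp stand-ins so the sketch compiles anywhere. memequalWideModel and its small-size guard are illustrative additions, not ClickHouse code.

#include <cassert>
#include <cstddef>
#include <cstring>

// Stand-ins for the SIMD helpers in the diff: compare 16 / 64 bytes at once.
static bool compare8(const char * p1, const char * p2) { return 0 == memcmp(p1, p2, 16); }
static bool compare64(const char * p1, const char * p2) { return 0 == memcmp(p1, p2, 64); }

// Same control flow as memequalWide: chew through 64-byte blocks, then finish
// the remaining 16..63 bytes with overlapping 16-byte compares.
static bool memequalWideModel(const char * p1, const char * p2, size_t size)
{
    if (size < 16)
        return 0 == memcmp(p1, p2, size); // the real code has dedicated small-size branches
    while (size >= 64)
    {
        if (!compare64(p1, p2))
            return false;
        p1 += 64;
        p2 += 64;
        size -= 64;
    }
    switch (size / 16)
    {
        case 3: if (!compare8(p1 + 32, p2 + 32)) return false; [[fallthrough]];
        case 2: if (!compare8(p1 + 16, p2 + 16)) return false; [[fallthrough]];
        case 1: if (!compare8(p1, p2)) return false;
    }
    // Overlapping final compare covers the last 1..15 bytes without a scalar loop.
    return compare8(p1 + size - 16, p2 + size - 16);
}

int main()
{
    const char a[] = "hello, clickhouse string comparison!";
    const char b[] = "hello, clickhouse string comparison!";
    assert(memequalWideModel(a, b, sizeof(a)));
}

The point of the rename is that this dispatch is now identical on x86-64 and aarch64; only the 16/64-byte primitives differ per platform.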
@@ -115,8 +115,15 @@
 /// because SIGABRT is easier to debug than SIGTRAP (the second one makes gdb crazy)
 #if !defined(chassert)
 #if defined(ABORT_ON_LOGICAL_ERROR)
+    // clang-format off
+    #include <base/types.h>
+    namespace DB
+    {
+        void abortOnFailedAssertion(const String & description);
+    }
     #define chassert(x) static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(#x)
     #define UNREACHABLE() abort()
+    // clang-format off
 #else
 /// Here sizeof() trick is used to suppress unused warning for result,
 /// since simple "(void)x" will evaluate the expression, while
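A condensed, runnable sketch of the chassert pattern this hunk wires up, assuming a simplified abortOnFailedAssertion (the real one lives in ClickHouse and does more than print): the macro stays a single expression, so it can appear anywhere a void expression can, and the stringized condition text travels with the failure.

#include <cstdio>
#include <cstdlib>
#include <string>

namespace DB
{
    // Out-of-line so the macro body stays tiny; illustrative stand-in only.
    void abortOnFailedAssertion(const std::string & description)
    {
        std::fprintf(stderr, "Assertion failed: %s\n", description.c_str());
        std::abort();
    }
}

// Expression form: evaluates x exactly once, aborts with #x on failure.
#define chassert(x) static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(#x)

int main()
{
    int size = 8;
    const char * pos = "payload";
    chassert(pos);        // passes
    chassert(size >= 0);  // passes
}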
base/base/simd.h (new file; 14 lines)

@@ -0,0 +1,14 @@
+#pragma once
+
+#if defined(__aarch64__) && defined(__ARM_NEON)
+
+# include <arm_neon.h>
+# pragma clang diagnostic ignored "-Wreserved-identifier"
+
+/// Returns a 64 bit mask of nibbles (4 bits for each byte).
+inline uint64_t getNibbleMask(uint8x16_t res)
+{
+    return vget_lane_u64(vreinterpret_u64_u8(vshrn_n_u16(vreinterpretq_u16_u8(res), 4)), 0);
+}
+
+#endif
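A scalar model of what getNibbleMask computes may be useful: vceqq_u8 yields 0xFF per matching byte and 0x00 otherwise, and the vshrn_n_u16 trick compresses those 16 bytes into a 64-bit mask at 4 bits per byte, so "all 16 bytes equal" becomes one integer compare against all-ones. The bit layout below is simplified (the sketch packs low nibbles in byte order, which is equivalent for 0x00/0xFF inputs); nibbleMaskModel is an illustrative stand-in, not the NEON code.

#include <cassert>
#include <cstdint>

// Scalar model: each comparison byte (0xFF matched, 0x00 not) contributes one
// 4-bit nibble to a 64-bit mask. On aarch64 this is a single vshrn_n_u16 over
// the vceqq_u8 result instead of a loop.
static uint64_t nibbleMaskModel(const uint8_t eq[16])
{
    uint64_t mask = 0;
    for (int i = 0; i < 16; ++i)
        mask |= static_cast<uint64_t>(eq[i] & 0x0F) << (4 * i);
    return mask;
}

int main()
{
    const uint8_t a[16] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
    const uint8_t b[16] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
    uint8_t eq[16];
    for (int i = 0; i < 16; ++i)
        eq[i] = (a[i] == b[i]) ? 0xFF : 0x00;
    // All bytes equal <=> every nibble set <=> mask is all ones.
    assert(nibbleMaskModel(eq) == 0xFFFFFFFFFFFFFFFFULL);
}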
@@ -12,7 +12,6 @@
 #include <tuple>
 #include <limits>

-#include <boost/multiprecision/cpp_bin_float.hpp>
 #include <boost/math/special_functions/fpclassify.hpp>

 // NOLINTBEGIN(*)
@@ -22,6 +21,7 @@
 #define CONSTEXPR_FROM_DOUBLE constexpr
 using FromDoubleIntermediateType = long double;
 #else
+#include <boost/multiprecision/cpp_bin_float.hpp>
 /// `wide_integer_from_builtin` can't be constexpr with non-literal `cpp_bin_float_double_extended`
 #define CONSTEXPR_FROM_DOUBLE
 using FromDoubleIntermediateType = boost::multiprecision::cpp_bin_float_double_extended;
@@ -19,7 +19,6 @@
 #include "Poco/UTF16Encoding.h"
 #include "Poco/Buffer.h"
 #include "Poco/Exception.h"
-#include <iostream>


 using Poco::Buffer;
@@ -97,7 +97,7 @@ namespace Data
 ///
 ///     static void extract(std::size_t pos, Person& obj, const Person& defVal, AbstractExtractor::Ptr pExt)
 ///     {
-///         // defVal is the default person we should use if we encunter NULL entries, so we take the individual fields
+///         // defVal is the default person we should use if we encounter NULL entries, so we take the individual fields
 ///         // as defaults. You can do more complex checking, ie return defVal if only one single entry of the fields is null etc...
 ///         poco_assert_dbg (!pExt.isNull());
 ///         std::string lastName;
@@ -57,7 +57,7 @@ public:
     URI();
         /// Creates an empty URI.

-    explicit URI(const std::string & uri, bool disable_url_encoding = false);
+    explicit URI(const std::string & uri, bool enable_url_encoding = true);
         /// Parses an URI from the given string. Throws a
         /// SyntaxException if the uri is not valid.

@@ -362,7 +362,7 @@ private:
     std::string _query;
     std::string _fragment;

-    bool _disable_url_encoding = false;
+    bool _enable_url_encoding = true;
 };
@@ -16,7 +16,6 @@
 #include "Poco/TaskManager.h"
 #include "Poco/Exception.h"

-#include <iostream>
 #include <array>
@@ -36,8 +36,8 @@ URI::URI():
 }


-URI::URI(const std::string& uri, bool decode_and_encode_path):
-    _port(0), _disable_url_encoding(decode_and_encode_path)
+URI::URI(const std::string& uri, bool enable_url_encoding):
+    _port(0), _enable_url_encoding(enable_url_encoding)
 {
     parse(uri);
 }
@@ -108,7 +108,7 @@ URI::URI(const URI& uri):
     _path(uri._path),
     _query(uri._query),
     _fragment(uri._fragment),
-    _disable_url_encoding(uri._disable_url_encoding)
+    _enable_url_encoding(uri._enable_url_encoding)
 {
 }
@@ -121,7 +121,7 @@ URI::URI(const URI& baseURI, const std::string& relativeURI):
     _path(baseURI._path),
     _query(baseURI._query),
     _fragment(baseURI._fragment),
-    _disable_url_encoding(baseURI._disable_url_encoding)
+    _enable_url_encoding(baseURI._enable_url_encoding)
 {
     resolve(relativeURI);
 }
@@ -153,7 +153,7 @@ URI& URI::operator = (const URI& uri)
         _path = uri._path;
         _query = uri._query;
         _fragment = uri._fragment;
-        _disable_url_encoding = uri._disable_url_encoding;
+        _enable_url_encoding = uri._enable_url_encoding;
     }
     return *this;
 }
@@ -184,7 +184,7 @@ void URI::swap(URI& uri)
     std::swap(_path, uri._path);
     std::swap(_query, uri._query);
     std::swap(_fragment, uri._fragment);
-    std::swap(_disable_url_encoding, uri._disable_url_encoding);
+    std::swap(_enable_url_encoding, uri._enable_url_encoding);
 }


@@ -687,18 +687,18 @@ void URI::decode(const std::string& str, std::string& decodedStr, bool plusAsSpa

 void URI::encodePath(std::string & encodedStr) const
 {
-    if (_disable_url_encoding)
-        encodedStr = _path;
-    else
+    if (_enable_url_encoding)
         encode(_path, RESERVED_PATH, encodedStr);
+    else
+        encodedStr = _path;
 }

 void URI::decodePath(const std::string & encodedStr)
 {
-    if (_disable_url_encoding)
-        _path = encodedStr;
-    else
+    if (_enable_url_encoding)
         decode(encodedStr, _path);
+    else
+        _path = encodedStr;
 }

 bool URI::isWellKnownPort() const
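A usage sketch of the renamed flag, assuming ClickHouse's patched Poco from this diff (the second URI constructor argument does not exist in upstream Poco): with enable_url_encoding = false, decodePath and encodePath above pass the path through verbatim, which is what the URL engine's disable_url_encoding setting relies on. The expected outputs in the comments follow from the patched decodePath.

#include <Poco/URI.h>
#include <iostream>

int main()
{
    // Default (enable_url_encoding = true): the path is percent-decoded on
    // parse and re-encoded on output, so escapes are normalized.
    Poco::URI encoded("https://example.com/a%20b/c");

    // enable_url_encoding = false: the path bytes are kept exactly as given.
    Poco::URI raw("https://example.com/a%20b/c", /*enable_url_encoding=*/ false);

    std::cout << encoded.getPath() << '\n'; // "/a b/c" (decoded)
    std::cout << raw.getPath() << '\n';     // "/a%20b/c" (verbatim)
}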
@@ -14,7 +14,6 @@

 #include "Poco/JSON/Object.h"
 #include <iostream>
-#include <sstream>


 using Poco::Dynamic::Var;
@@ -26,7 +26,6 @@
 #include "Poco/CountingStream.h"
 #include "Poco/RegularExpression.h"
 #include <sstream>
-#include <iostream>


 using Poco::NumberFormatter;
@@ -146,7 +146,7 @@ namespace Net

             std::string cipherList;
             /// Specifies the supported ciphers in OpenSSL notation.
-            /// Defaults to "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH".
+            /// Defaults to "ALL:!ADH:!LOW:!EXP:!MD5:!3DES:@STRENGTH".

             std::string dhParamsFile;
             /// Specifies a file containing Diffie-Hellman parameters.
@@ -172,7 +172,7 @@ namespace Net
             VerificationMode verificationMode = VERIFY_RELAXED,
             int verificationDepth = 9,
             bool loadDefaultCAs = false,
-            const std::string & cipherList = "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
+            const std::string & cipherList = "ALL:!ADH:!LOW:!EXP:!MD5:!3DES:@STRENGTH");
         /// Creates a Context.
         ///
         /// * usage specifies whether the context is used by a client or server.
@@ -200,7 +200,7 @@ namespace Net
             VerificationMode verificationMode = VERIFY_RELAXED,
             int verificationDepth = 9,
             bool loadDefaultCAs = false,
-            const std::string & cipherList = "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
+            const std::string & cipherList = "ALL:!ADH:!LOW:!EXP:!MD5:!3DES:@STRENGTH");
         /// Creates a Context.
         ///
         /// * usage specifies whether the context is used by a client or server.
@@ -76,7 +76,7 @@ namespace Net
 ///    <verificationMode>none|relaxed|strict|once</verificationMode>
 ///    <verificationDepth>1..9</verificationDepth>
 ///    <loadDefaultCAFile>true|false</loadDefaultCAFile>
-///    <cipherList>ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH</cipherList>
+///    <cipherList>ALL:!ADH:!LOW:!EXP:!MD5:!3DES:@STRENGTH</cipherList>
 ///    <preferServerCiphers>true|false</preferServerCiphers>
 ///    <privateKeyPassphraseHandler>
 ///        <name>KeyFileHandler</name>
@@ -41,7 +41,7 @@ Context::Params::Params():
     verificationMode(VERIFY_RELAXED),
     verificationDepth(9),
     loadDefaultCAs(false),
-    cipherList("ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH")
+    cipherList("ALL:!ADH:!LOW:!EXP:!MD5:!3DES:@STRENGTH")
 {
 }
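For context on the cipher-string change: appending !3DES excludes the 64-bit-block triple-DES suites (the Sweet32 issue, CVE-2016-2183). A minimal sketch of where such a string lands in OpenSSL, assuming OpenSSL 1.1+ and a standalone program rather than anything in NetSSL:

#include <openssl/ssl.h>
#include <cstdio>

int main()
{
    SSL_CTX * ctx = SSL_CTX_new(TLS_client_method());
    // Same default string NetSSL now uses: "!3DES" removes the 3DES suites,
    // "@STRENGTH" sorts the remaining ciphers by strength.
    if (!SSL_CTX_set_cipher_list(ctx, "ALL:!ADH:!LOW:!EXP:!MD5:!3DES:@STRENGTH"))
        std::fprintf(stderr, "no cipher matched the list\n");
    SSL_CTX_free(ctx);
}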
@@ -4,10 +4,19 @@ macro(add_glob cur_list)
 endmacro()

 macro(add_headers_and_sources prefix common_path)
-    add_glob(${prefix}_headers ${CMAKE_CURRENT_SOURCE_DIR} ${common_path}/*.h)
-    add_glob(${prefix}_sources ${common_path}/*.cpp ${common_path}/*.c ${common_path}/*.h)
+    add_glob(${prefix}_headers ${common_path}/*.h)
+    add_glob(${prefix}_sources ${common_path}/*.cpp ${common_path}/*.c)
 endmacro()

 macro(add_headers_only prefix common_path)
-    add_glob(${prefix}_headers ${CMAKE_CURRENT_SOURCE_DIR} ${common_path}/*.h)
+    add_glob(${prefix}_headers ${common_path}/*.h)
+endmacro()
+
+macro(extract_into_parent_list src_list dest_list)
+    list(REMOVE_ITEM ${src_list} ${ARGN})
+    get_filename_component(__dir_name ${CMAKE_CURRENT_SOURCE_DIR} NAME)
+    foreach(file IN ITEMS ${ARGN})
+        list(APPEND ${dest_list} ${__dir_name}/${file})
+    endforeach()
+    set(${dest_list} "${${dest_list}}" PARENT_SCOPE)
 endmacro()
@@ -20,6 +20,9 @@ set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/s390x-linux-gnu/libc")
 set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
 set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
 set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
+set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=mold -Wl,-L${CMAKE_SYSROOT}/usr/lib64")
+set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=mold -Wl,-L${CMAKE_SYSROOT}/usr/lib64")
+set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=mold -Wl,-L${CMAKE_SYSROOT}/usr/lib64")

 set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
 set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
@@ -19,6 +19,19 @@ else ()
     message (FATAL_ERROR "Platform ${CMAKE_SYSTEM_NAME} is not supported")
 endif ()

+# Since we always use toolchain files to generate hermetic builds, cmake will
+# always think it's a cross-compilation, See
+# https://cmake.org/cmake/help/latest/variable/CMAKE_CROSSCOMPILING.html
+#
+# This will slow down cmake configuration and compilation. For instance, LLVM
+# will try to configure NATIVE LLVM targets with all tests enabled (You'll see
+# Building native llvm-tblgen...).
+#
+# Here, we set it manually by checking the system name and processor.
+if (${CMAKE_SYSTEM_NAME} STREQUAL ${CMAKE_HOST_SYSTEM_NAME} AND ${CMAKE_SYSTEM_PROCESSOR} STREQUAL ${CMAKE_HOST_SYSTEM_PROCESSOR})
+    set (CMAKE_CROSSCOMPILING 0)
+endif ()
+
 if (CMAKE_CROSSCOMPILING)
     if (OS_DARWIN)
         # FIXME: broken dependencies
@@ -47,7 +60,7 @@ if (CMAKE_CROSSCOMPILING)
         set (ENABLE_RUST OFF CACHE INTERNAL "")
     elseif (ARCH_S390X)
         set (ENABLE_GRPC OFF CACHE INTERNAL "")
-        set (ENABLE_SENTRY OFF CACHE INTERNAL "")
+        set (ENABLE_RUST OFF CACHE INTERNAL "")
     endif ()
 elseif (OS_FREEBSD)
     # FIXME: broken dependencies
contrib/CMakeLists.txt (vendored; 12 lines changed)

@@ -92,6 +92,7 @@ add_contrib (google-protobuf-cmake google-protobuf)
 add_contrib (openldap-cmake openldap)
 add_contrib (grpc-cmake grpc)
 add_contrib (msgpack-c-cmake msgpack-c)
+add_contrib (libarchive-cmake libarchive)

 add_contrib (corrosion-cmake corrosion)

@@ -195,6 +196,17 @@ if (ARCH_S390X)
     add_contrib(crc32-s390x-cmake crc32-s390x)
 endif()
 add_contrib (annoy-cmake annoy)
+
+option(ENABLE_USEARCH "Enable USearch (Approximate Neighborhood Search, HNSW) support" ${ENABLE_LIBRARIES})
+if (ENABLE_USEARCH)
+    add_contrib (FP16-cmake FP16)
+    add_contrib (robin-map-cmake robin-map)
+    add_contrib (SimSIMD-cmake SimSIMD)
+    add_contrib (usearch-cmake usearch) # requires: FP16, robin-map, SimdSIMD
+else ()
+    message(STATUS "Not using USearch")
+endif ()
+
 add_contrib (xxHash-cmake xxHash)

 add_contrib (libbcrypt-cmake libbcrypt)
contrib/FP16 (new vendored submodule)
@@ -0,0 +1 @@
+Subproject commit 0a92994d729ff76a58f692d3028ca1b64b145d91

contrib/FP16-cmake/CMakeLists.txt (new file)
@@ -0,0 +1 @@
+# See contrib/usearch-cmake/CMakeLists.txt

contrib/SimSIMD (new vendored submodule)
@@ -0,0 +1 @@
+Subproject commit de2cb75b9e9e3389d5e1e51fd9f8ed151f3c17cf

contrib/SimSIMD-cmake/CMakeLists.txt (new file)
@@ -0,0 +1 @@
+# See contrib/usearch-cmake/CMakeLists.txt

contrib/base64 (vendored submodule)
@@ -1 +1 @@
-Subproject commit 9499e0c4945589973b9ea1bc927377cfbc84aa46
+Subproject commit 8628e258090f9eb76d90ac3c91e1ab4690e9aa11

contrib/boost (vendored submodule)
@@ -1 +1 @@
-Subproject commit aec12eea7fc762721ae16943d1361340c66c9c17
+Subproject commit 063a9372b4ae304e869a5c5724971d0501552731
@@ -19,6 +19,12 @@ add_library (_boost_filesystem ${SRCS_FILESYSTEM})
 add_library (boost::filesystem ALIAS _boost_filesystem)
 target_include_directories (_boost_filesystem SYSTEM BEFORE PUBLIC ${LIBRARY_DIR})

+if (OS_LINUX)
+    target_compile_definitions (_boost_filesystem PRIVATE
+        BOOST_FILESYSTEM_HAS_POSIX_AT_APIS=1
+    )
+endif ()
+
 # headers-only

 add_library (_boost_headers_only INTERFACE)
@@ -172,9 +178,9 @@ endif()
 # coroutine

 set (SRCS_COROUTINE
-    "${LIBRARY_DIR}/libs/coroutine/detail/coroutine_context.cpp"
-    "${LIBRARY_DIR}/libs/coroutine/exceptions.cpp"
-    "${LIBRARY_DIR}/libs/coroutine/posix/stack_traits.cpp"
+    "${LIBRARY_DIR}/libs/coroutine/src/detail/coroutine_context.cpp"
+    "${LIBRARY_DIR}/libs/coroutine/src/exceptions.cpp"
+    "${LIBRARY_DIR}/libs/coroutine/src/posix/stack_traits.cpp"
 )
 add_library (_boost_coroutine ${SRCS_COROUTINE})
 add_library (boost::coroutine ALIAS _boost_coroutine)
@@ -73,8 +73,8 @@ struct uint128

     uint128() = default;
     uint128(uint64 low64_, uint64 high64_) : low64(low64_), high64(high64_) {}
-    friend bool operator ==(const uint128 & x, const uint128 & y) { return (x.low64 == y.low64) && (x.high64 == y.high64); }
-    friend bool operator !=(const uint128 & x, const uint128 & y) { return !(x == y); }
+
+    friend auto operator<=>(const uint128 &, const uint128 &) = default;
 };

 inline uint64 Uint128Low64(const uint128 & x) { return x.low64; }
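The replacement relies on C++20 defaulted three-way comparison: one defaulted operator<=> also implicitly declares a defaulted operator==, so ==, !=, and the orderings all come for free, compared member-wise in declaration order. A minimal standalone illustration (uint128_model is a stand-in for the struct above):

#include <cassert>
#include <compare>
#include <cstdint>

struct uint128_model
{
    uint64_t low64;
    uint64_t high64;
    // One defaulted declaration generates ==, !=, <, <=, >, >= member-wise.
    friend auto operator<=>(const uint128_model &, const uint128_model &) = default;
};

int main()
{
    uint128_model a{1, 2}, b{1, 2}, c{3, 2};
    assert(a == b);
    assert(a != c);
    assert(a < c); // low64 is compared first, matching declaration order
}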
contrib/curl (vendored submodule)
@@ -1 +1 @@
-Subproject commit b0edf0b7dae44d9e66f270a257cf654b35d5263d
+Subproject commit eb3b049df526bf125eda23218e680ce7fa9ec46c
@@ -8,125 +8,122 @@ endif()
 set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/curl")

The SRCS list is rewritten wholesale for the updated curl submodule: the same sources re-sorted alphabetically, with lib/curl_log.c superseded by lib/curl_trc.c and the dropped TLS backends lib/vtls/nss.c and lib/vtls/gskit.c removed. After the change the hunk reads:

 set (SRCS
     "${LIBRARY_DIR}/lib/altsvc.c"
     "${LIBRARY_DIR}/lib/amigaos.c"
     "${LIBRARY_DIR}/lib/asyn-thread.c"
     "${LIBRARY_DIR}/lib/base64.c"
     "${LIBRARY_DIR}/lib/bufq.c"
     "${LIBRARY_DIR}/lib/bufref.c"
     "${LIBRARY_DIR}/lib/cf-h1-proxy.c"
     "${LIBRARY_DIR}/lib/cf-haproxy.c"
     "${LIBRARY_DIR}/lib/cf-https-connect.c"
     "${LIBRARY_DIR}/lib/cf-socket.c"
     "${LIBRARY_DIR}/lib/cfilters.c"
     "${LIBRARY_DIR}/lib/conncache.c"
     "${LIBRARY_DIR}/lib/connect.c"
     "${LIBRARY_DIR}/lib/content_encoding.c"
     "${LIBRARY_DIR}/lib/cookie.c"
     "${LIBRARY_DIR}/lib/curl_addrinfo.c"
     "${LIBRARY_DIR}/lib/curl_des.c"
     "${LIBRARY_DIR}/lib/curl_endian.c"
     "${LIBRARY_DIR}/lib/curl_fnmatch.c"
     "${LIBRARY_DIR}/lib/curl_get_line.c"
     "${LIBRARY_DIR}/lib/curl_gethostname.c"
     "${LIBRARY_DIR}/lib/curl_gssapi.c"
     "${LIBRARY_DIR}/lib/curl_memrchr.c"
     "${LIBRARY_DIR}/lib/curl_multibyte.c"
     "${LIBRARY_DIR}/lib/curl_ntlm_core.c"
     "${LIBRARY_DIR}/lib/curl_ntlm_wb.c"
     "${LIBRARY_DIR}/lib/curl_path.c"
     "${LIBRARY_DIR}/lib/curl_range.c"
     "${LIBRARY_DIR}/lib/curl_rtmp.c"
     "${LIBRARY_DIR}/lib/curl_sasl.c"
     "${LIBRARY_DIR}/lib/curl_sspi.c"
     "${LIBRARY_DIR}/lib/curl_threads.c"
     "${LIBRARY_DIR}/lib/curl_trc.c"
     "${LIBRARY_DIR}/lib/dict.c"
     "${LIBRARY_DIR}/lib/doh.c"
     "${LIBRARY_DIR}/lib/dynbuf.c"
     "${LIBRARY_DIR}/lib/dynhds.c"
     "${LIBRARY_DIR}/lib/easy.c"
     "${LIBRARY_DIR}/lib/escape.c"
     "${LIBRARY_DIR}/lib/file.c"
     "${LIBRARY_DIR}/lib/fileinfo.c"
     "${LIBRARY_DIR}/lib/fopen.c"
     "${LIBRARY_DIR}/lib/formdata.c"
     "${LIBRARY_DIR}/lib/getenv.c"
     "${LIBRARY_DIR}/lib/getinfo.c"
     "${LIBRARY_DIR}/lib/gopher.c"
     "${LIBRARY_DIR}/lib/hash.c"
     "${LIBRARY_DIR}/lib/headers.c"
     "${LIBRARY_DIR}/lib/hmac.c"
     "${LIBRARY_DIR}/lib/hostasyn.c"
     "${LIBRARY_DIR}/lib/hostip.c"
     "${LIBRARY_DIR}/lib/hostip4.c"
     "${LIBRARY_DIR}/lib/hostip6.c"
     "${LIBRARY_DIR}/lib/hostsyn.c"
     "${LIBRARY_DIR}/lib/hsts.c"
     "${LIBRARY_DIR}/lib/http.c"
     "${LIBRARY_DIR}/lib/http2.c"
     "${LIBRARY_DIR}/lib/http_aws_sigv4.c"
     "${LIBRARY_DIR}/lib/http_chunks.c"
     "${LIBRARY_DIR}/lib/http_digest.c"
     "${LIBRARY_DIR}/lib/http_negotiate.c"
     "${LIBRARY_DIR}/lib/http_ntlm.c"
     "${LIBRARY_DIR}/lib/http_proxy.c"
     "${LIBRARY_DIR}/lib/idn.c"
     "${LIBRARY_DIR}/lib/if2ip.c"
     "${LIBRARY_DIR}/lib/imap.c"
     "${LIBRARY_DIR}/lib/inet_ntop.c"
     "${LIBRARY_DIR}/lib/inet_pton.c"
     "${LIBRARY_DIR}/lib/krb5.c"
     "${LIBRARY_DIR}/lib/ldap.c"
     "${LIBRARY_DIR}/lib/llist.c"
     "${LIBRARY_DIR}/lib/md4.c"
     "${LIBRARY_DIR}/lib/md5.c"
     "${LIBRARY_DIR}/lib/memdebug.c"
     "${LIBRARY_DIR}/lib/mime.c"
     "${LIBRARY_DIR}/lib/mprintf.c"
     "${LIBRARY_DIR}/lib/mqtt.c"
     "${LIBRARY_DIR}/lib/multi.c"
     "${LIBRARY_DIR}/lib/netrc.c"
     "${LIBRARY_DIR}/lib/nonblock.c"
     "${LIBRARY_DIR}/lib/noproxy.c"
     "${LIBRARY_DIR}/lib/openldap.c"
     "${LIBRARY_DIR}/lib/parsedate.c"
     "${LIBRARY_DIR}/lib/pingpong.c"
     "${LIBRARY_DIR}/lib/pop3.c"
     "${LIBRARY_DIR}/lib/progress.c"
     "${LIBRARY_DIR}/lib/psl.c"
     "${LIBRARY_DIR}/lib/rand.c"
     "${LIBRARY_DIR}/lib/rename.c"
     "${LIBRARY_DIR}/lib/rtsp.c"
     "${LIBRARY_DIR}/lib/select.c"
     "${LIBRARY_DIR}/lib/sendf.c"
     "${LIBRARY_DIR}/lib/setopt.c"
     "${LIBRARY_DIR}/lib/sha256.c"
     "${LIBRARY_DIR}/lib/share.c"
     "${LIBRARY_DIR}/lib/slist.c"
     "${LIBRARY_DIR}/lib/smb.c"
     "${LIBRARY_DIR}/lib/smtp.c"
     "${LIBRARY_DIR}/lib/socketpair.c"
     "${LIBRARY_DIR}/lib/socks.c"
     "${LIBRARY_DIR}/lib/socks_gssapi.c"
     "${LIBRARY_DIR}/lib/socks_sspi.c"
     "${LIBRARY_DIR}/lib/speedcheck.c"
     "${LIBRARY_DIR}/lib/splay.c"
     "${LIBRARY_DIR}/lib/strcase.c"
     "${LIBRARY_DIR}/lib/strdup.c"
     "${LIBRARY_DIR}/lib/strerror.c"
     "${LIBRARY_DIR}/lib/strtok.c"
     "${LIBRARY_DIR}/lib/strtoofft.c"
     "${LIBRARY_DIR}/lib/system_win32.c"
     "${LIBRARY_DIR}/lib/telnet.c"
     "${LIBRARY_DIR}/lib/timediff.c"
     "${LIBRARY_DIR}/lib/timeval.c"
     "${LIBRARY_DIR}/lib/transfer.c"
     "${LIBRARY_DIR}/lib/url.c"
     "${LIBRARY_DIR}/lib/urlapi.c"
     "${LIBRARY_DIR}/lib/vauth/cleartext.c"
     "${LIBRARY_DIR}/lib/vauth/cram.c"
     "${LIBRARY_DIR}/lib/vauth/digest.c"
@@ -138,23 +135,24 @@ set (SRCS
     "${LIBRARY_DIR}/lib/vauth/oauth2.c"
     "${LIBRARY_DIR}/lib/vauth/spnego_gssapi.c"
     "${LIBRARY_DIR}/lib/vauth/spnego_sspi.c"
     "${LIBRARY_DIR}/lib/vauth/vauth.c"
     "${LIBRARY_DIR}/lib/version.c"
     "${LIBRARY_DIR}/lib/vquic/vquic.c"
     "${LIBRARY_DIR}/lib/vssh/libssh.c"
     "${LIBRARY_DIR}/lib/vssh/libssh2.c"
     "${LIBRARY_DIR}/lib/vtls/bearssl.c"
     "${LIBRARY_DIR}/lib/vtls/gtls.c"
     "${LIBRARY_DIR}/lib/vtls/hostcheck.c"
     "${LIBRARY_DIR}/lib/vtls/keylog.c"
     "${LIBRARY_DIR}/lib/vtls/mbedtls.c"
     "${LIBRARY_DIR}/lib/vtls/openssl.c"
     "${LIBRARY_DIR}/lib/vtls/schannel.c"
     "${LIBRARY_DIR}/lib/vtls/schannel_verify.c"
     "${LIBRARY_DIR}/lib/vtls/sectransp.c"
     "${LIBRARY_DIR}/lib/vtls/vtls.c"
     "${LIBRARY_DIR}/lib/vtls/wolfssl.c"
     "${LIBRARY_DIR}/lib/vtls/x509asn1.c"
     "${LIBRARY_DIR}/lib/warnless.c"
 )

 add_library (_curl ${SRCS})
@@ -1,6 +1,7 @@
 option(ENABLE_ISAL_LIBRARY "Enable ISA-L library" ${ENABLE_LIBRARIES})
-if (ARCH_AARCH64)
-    # Disable ISA-L libray on aarch64.
+# ISA-L is only available for x86-64, so it shall be disabled for other platforms
+if (NOT ARCH_AMD64)
     set (ENABLE_ISAL_LIBRARY OFF)
 endif ()
 
2 contrib/krb5 vendored
@@ -1 +1 @@
-Subproject commit b56ce6ba690e1f320df1a64afa34980c3e462617
+Subproject commit 71b06c2276009ae649c7703019f3b4605f66fd3d
1 contrib/libarchive vendored Submodule
@@ -0,0 +1 @@
+Subproject commit ee45796171324519f0c0bfd012018dd099296336
182 contrib/libarchive-cmake/CMakeLists.txt Normal file
@@ -0,0 +1,182 @@
+set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/libarchive")
+
+set(SRCS
+    "${LIBRARY_DIR}/libarchive/archive_acl.c"
+    "${LIBRARY_DIR}/libarchive/archive_blake2sp_ref.c"
+    "${LIBRARY_DIR}/libarchive/archive_blake2s_ref.c"
+    "${LIBRARY_DIR}/libarchive/archive_check_magic.c"
+    "${LIBRARY_DIR}/libarchive/archive_cmdline.c"
+    "${LIBRARY_DIR}/libarchive/archive_cryptor.c"
+    "${LIBRARY_DIR}/libarchive/archive_digest.c"
+    "${LIBRARY_DIR}/libarchive/archive_disk_acl_darwin.c"
+    "${LIBRARY_DIR}/libarchive/archive_disk_acl_freebsd.c"
+    "${LIBRARY_DIR}/libarchive/archive_disk_acl_linux.c"
+    "${LIBRARY_DIR}/libarchive/archive_disk_acl_sunos.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry_copy_bhfi.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry_copy_stat.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry_link_resolver.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry_sparse.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry_stat.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry_strmode.c"
+    "${LIBRARY_DIR}/libarchive/archive_entry_xattr.c"
+    "${LIBRARY_DIR}/libarchive/archive_getdate.c"
+    "${LIBRARY_DIR}/libarchive/archive_hmac.c"
+    "${LIBRARY_DIR}/libarchive/archive_match.c"
+    "${LIBRARY_DIR}/libarchive/archive_options.c"
+    "${LIBRARY_DIR}/libarchive/archive_pack_dev.c"
+    "${LIBRARY_DIR}/libarchive/archive_pathmatch.c"
+    "${LIBRARY_DIR}/libarchive/archive_ppmd7.c"
+    "${LIBRARY_DIR}/libarchive/archive_ppmd8.c"
+    "${LIBRARY_DIR}/libarchive/archive_random.c"
+    "${LIBRARY_DIR}/libarchive/archive_rb.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_add_passphrase.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_append_filter.c"
+    "${LIBRARY_DIR}/libarchive/archive_read.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_data_into_fd.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_disk_entry_from_file.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_disk_posix.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_disk_set_standard_lookup.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_disk_windows.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_extract2.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_extract.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_open_fd.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_open_file.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_open_filename.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_open_memory.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_set_format.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_set_options.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_all.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_by_code.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_bzip2.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_compress.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_grzip.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_gzip.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_lrzip.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_lz4.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_lzop.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_none.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_program.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_rpm.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_uu.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_xz.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_filter_zstd.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_7zip.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_all.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_ar.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_by_code.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_cab.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_cpio.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_empty.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_iso9660.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_lha.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_mtree.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_rar5.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_rar.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_raw.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_tar.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_warc.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_xar.c"
+    "${LIBRARY_DIR}/libarchive/archive_read_support_format_zip.c"
+    "${LIBRARY_DIR}/libarchive/archive_string.c"
+    "${LIBRARY_DIR}/libarchive/archive_string_sprintf.c"
+    "${LIBRARY_DIR}/libarchive/archive_util.c"
+    "${LIBRARY_DIR}/libarchive/archive_version_details.c"
+    "${LIBRARY_DIR}/libarchive/archive_virtual.c"
+    "${LIBRARY_DIR}/libarchive/archive_windows.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_b64encode.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_by_name.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_bzip2.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_compress.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_grzip.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_gzip.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_lrzip.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_lz4.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_lzop.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_none.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_program.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_uuencode.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_xz.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_add_filter_zstd.c"
+    "${LIBRARY_DIR}/libarchive/archive_write.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_disk_posix.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_disk_set_standard_lookup.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_disk_windows.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_open_fd.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_open_file.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_open_filename.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_open_memory.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_7zip.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_ar.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_by_name.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_cpio_binary.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_cpio.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_cpio_newc.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_cpio_odc.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_filter_by_ext.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_gnutar.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_iso9660.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_mtree.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_pax.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_raw.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_shar.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_ustar.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_v7tar.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_warc.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_xar.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_format_zip.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_options.c"
+    "${LIBRARY_DIR}/libarchive/archive_write_set_passphrase.c"
+    "${LIBRARY_DIR}/libarchive/filter_fork_posix.c"
+    "${LIBRARY_DIR}/libarchive/filter_fork_windows.c"
+    "${LIBRARY_DIR}/libarchive/xxhash.c"
+)
+
+add_library(_libarchive ${SRCS})
+target_include_directories(_libarchive PUBLIC
+    ${CMAKE_CURRENT_SOURCE_DIR}
+    "${LIBRARY_DIR}/libarchive"
+)
+
+target_compile_definitions(_libarchive PUBLIC
+    HAVE_CONFIG_H
+)
+
+target_compile_options(_libarchive PRIVATE "-Wno-reserved-macro-identifier")
+
+if (TARGET ch_contrib::xz)
+    target_compile_definitions(_libarchive PUBLIC HAVE_LZMA_H=1 HAVE_LIBLZMA=1)
+    target_link_libraries(_libarchive PRIVATE ch_contrib::xz)
+endif()
+
+if (TARGET ch_contrib::zlib)
+    target_compile_definitions(_libarchive PUBLIC HAVE_ZLIB_H=1)
+    target_link_libraries(_libarchive PRIVATE ch_contrib::zlib)
+endif()
+
+if (TARGET ch_contrib::zstd)
+    target_compile_definitions(_libarchive PUBLIC HAVE_ZSTD_H=1 HAVE_LIBZSTD=1)
+    target_link_libraries(_libarchive PRIVATE ch_contrib::zstd)
+endif()
+
+if (TARGET ch_contrib::bzip2)
+    target_compile_definitions(_libarchive PUBLIC HAVE_BZLIB_H=1)
+    target_link_libraries(_libarchive PRIVATE ch_contrib::bzip2)
+endif()
+
+if (OS_LINUX)
+    target_compile_definitions(
+        _libarchive PUBLIC
+        MAJOR_IN_SYSMACROS=1
+        HAVE_LINUX_FS_H=1
+        HAVE_STRUCT_STAT_ST_MTIM_TV_NSEC=1
+        HAVE_LINUX_TYPES_H=1
+        HAVE_SYS_STATFS_H=1
+        HAVE_FUTIMESAT=1
+        HAVE_ICONV=1
+    )
+endif()
+
+add_library(ch_contrib::libarchive ALIAS _libarchive)
1391 contrib/libarchive-cmake/config.h Normal file
(File diff suppressed because it is too large)
@@ -17,7 +17,8 @@
 #ifndef METROHASH_PLATFORM_H
 #define METROHASH_PLATFORM_H
 
-#include <stdint.h>
+#include <bit>
+#include <cstdint>
 #include <cstring>
 
 // rotate right idiom recognized by most compilers
@@ -33,6 +34,11 @@ inline static uint64_t read_u64(const void * const ptr)
     // so we use memcpy() which is the most portable. clang & gcc usually translates `memcpy()` into a single `load` instruction
     // when hardware supports it, so using memcpy() is efficient too.
     memcpy(&result, ptr, sizeof(result));
+
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+    result = std::byteswap(result);
+#endif
+
     return result;
 }
 
@@ -40,6 +46,11 @@ inline static uint64_t read_u32(const void * const ptr)
 {
     uint32_t result;
     memcpy(&result, ptr, sizeof(result));
+
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+    result = std::byteswap(result);
+#endif
+
     return result;
 }
 
@@ -47,6 +58,11 @@ inline static uint64_t read_u16(const void * const ptr)
 {
     uint16_t result;
     memcpy(&result, ptr, sizeof(result));
+
+#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+    result = std::byteswap(result);
+#endif
+
     return result;
 }
 
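The metrohash hunks above make the hash byte-order aware: on big-endian hosts (such as the s390x target this commit enables elsewhere) each loaded word is byte-swapped, so the hash values match little-endian platforms. A minimal Python sketch of the same normalization, purely illustrative and not part of the patch:

```python
import struct

def read_u64(buf: bytes, offset: int = 0) -> int:
    # "<Q" always decodes the 8 bytes as little-endian, which is what
    # std::byteswap on a big-endian host achieves in the C++ code above.
    return struct.unpack_from("<Q", buf, offset)[0]

data = bytes(range(8))
assert read_u64(data) == 0x0706050403020100  # same value on any host
```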
2 contrib/llvm-project vendored
@@ -1 +1 @@
-Subproject commit d857c707fccd50423bea1c4710dc469cf89607a9
+Subproject commit e7b8befca85c8b847614432dba250c22d35fbae0
@@ -1,18 +1,16 @@
-if (APPLE OR NOT ARCH_AMD64 OR SANITIZE STREQUAL "undefined")
+if (APPLE OR SANITIZE STREQUAL "undefined")
     set (ENABLE_EMBEDDED_COMPILER_DEFAULT OFF)
 else()
     set (ENABLE_EMBEDDED_COMPILER_DEFAULT ON)
 endif()
 
-option (ENABLE_EMBEDDED_COMPILER "Enable support for 'compile_expressions' option for query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT})
+option (ENABLE_EMBEDDED_COMPILER "Enable support for JIT compilation during query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT})
 
 if (NOT ENABLE_EMBEDDED_COMPILER)
     message(STATUS "Not using LLVM")
     return()
 endif()
 
-# TODO: Enable compilation on AArch64
-
 set (LLVM_VERSION "15.0.0bundled")
 set (LLVM_INCLUDE_DIRS
     "${ClickHouse_SOURCE_DIR}/contrib/llvm-project/llvm/include"
@@ -58,18 +56,30 @@ set (REQUIRED_LLVM_LIBRARIES
     LLVMDemangle
 )
 
-# if (ARCH_AMD64)
+if (ARCH_AMD64)
+    set (LLVM_TARGETS_TO_BUILD "X86" CACHE INTERNAL "")
     list(APPEND REQUIRED_LLVM_LIBRARIES LLVMX86Info LLVMX86Desc LLVMX86CodeGen)
-# elseif (ARCH_AARCH64)
-#     list(APPEND REQUIRED_LLVM_LIBRARIES LLVMAArch64Info LLVMAArch64Desc LLVMAArch64CodeGen)
-# endif ()
+elseif (ARCH_AARCH64)
+    set (LLVM_TARGETS_TO_BUILD "AArch64" CACHE INTERNAL "")
+    list(APPEND REQUIRED_LLVM_LIBRARIES LLVMAArch64Info LLVMAArch64Desc LLVMAArch64CodeGen)
+elseif (ARCH_PPC64LE)
+    set (LLVM_TARGETS_TO_BUILD "PowerPC" CACHE INTERNAL "")
+    list(APPEND REQUIRED_LLVM_LIBRARIES LLVMPowerPCInfo LLVMPowerPCDesc LLVMPowerPCCodeGen)
+elseif (ARCH_S390X)
+    set (LLVM_TARGETS_TO_BUILD "SystemZ" CACHE INTERNAL "")
+    list(APPEND REQUIRED_LLVM_LIBRARIES LLVMSystemZInfo LLVMSystemZDesc LLVMSystemZCodeGen)
+elseif (ARCH_RISCV64)
+    set (LLVM_TARGETS_TO_BUILD "RISCV" CACHE INTERNAL "")
+    list(APPEND REQUIRED_LLVM_LIBRARIES LLVMRISCVInfo LLVMRISCVDesc LLVMRISCVCodeGen)
+endif ()
+
+message (STATUS "LLVM TARGETS TO BUILD ${LLVM_TARGETS_TO_BUILD}")
 
 set (CMAKE_INSTALL_RPATH "ON") # Do not adjust RPATH in llvm, since then it will not be able to find libcxx/libcxxabi/libunwind
 set (LLVM_COMPILER_CHECKED 1 CACHE INTERNAL "") # Skip internal compiler selection
 set (LLVM_ENABLE_EH 1 CACHE INTERNAL "") # With exception handling
 set (LLVM_ENABLE_RTTI 1 CACHE INTERNAL "")
 set (LLVM_ENABLE_PIC 0 CACHE INTERNAL "")
-set (LLVM_TARGETS_TO_BUILD "X86" CACHE STRING "") # for x86 + ARM: "X86;AArch64"
 
 # Omit unnecessary stuff (just the options which are ON by default)
 set(LLVM_ENABLE_BACKTRACES 0 CACHE INTERNAL "")
@@ -99,15 +109,12 @@ set(LLVM_ENABLE_BINDINGS 0 CACHE INTERNAL "")
 set (LLVM_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/llvm-project/llvm")
 set (LLVM_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/llvm-project/llvm")
 
-# Since we always use toolchain files to generate hermatic builds, cmake will
-# think it's a cross compilation, and LLVM will try to configure NATIVE LLVM
-# targets with all tests enabled, which will slow down cmake configuration and
-# compilation (You'll see Building native llvm-tblgen...). Let's disable the
-# cross compiling indicator for now.
-#
-# TODO We should let cmake know whether it's indeed a cross compilation in the
-# first place.
-set (CMAKE_CROSSCOMPILING 0)
+message (STATUS "LLVM CMAKE CROSS COMPILING ${CMAKE_CROSSCOMPILING}")
+if (CMAKE_CROSSCOMPILING)
+    set (LLVM_HOST_TRIPLE "${CMAKE_C_COMPILER_TARGET}" CACHE INTERNAL "")
+    message (STATUS "CROSS COMPILING SET LLVM HOST TRIPLE ${LLVM_HOST_TRIPLE}")
+endif()
 
 add_subdirectory ("${LLVM_SOURCE_DIR}" "${LLVM_BINARY_DIR}")
 
 set_directory_properties (PROPERTIES
2 contrib/orc vendored
@@ -1 +1 @@
-Subproject commit 568d1d60c250af1890f226c182bc15bd8cc94cf1
+Subproject commit a20d1d9d7ad4a4be7b7ba97588e16ca8b9abb2b6
1 contrib/robin-map vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 851a59e0e3063ee0e23089062090a73fd3de482d
1 contrib/robin-map-cmake/CMakeLists.txt Normal file
@@ -0,0 +1 @@
+# See contrib/usearch-cmake/CMakeLists.txt
2 contrib/snappy vendored
@@ -1 +1 @@
-Subproject commit fb057edfed820212076239fd32cb2ff23e9016bf
+Subproject commit 6ebb5b1ab8801ea3fde103c5c29f5ab86df5fe7a
1 contrib/usearch vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 387b78b28b17b8954024ffc81e97cbcfa10d1f30
17 contrib/usearch-cmake/CMakeLists.txt Normal file
@@ -0,0 +1,17 @@
+set(USEARCH_PROJECT_DIR "${ClickHouse_SOURCE_DIR}/contrib/usearch")
+set(USEARCH_SOURCE_DIR "${USEARCH_PROJECT_DIR}/include")
+
+set(FP16_PROJECT_DIR "${ClickHouse_SOURCE_DIR}/contrib/FP16")
+set(ROBIN_MAP_PROJECT_DIR "${ClickHouse_SOURCE_DIR}/contrib/robin-map")
+set(SIMSIMD_PROJECT_DIR "${ClickHouse_SOURCE_DIR}/contrib/SimSIMD-map")
+
+add_library(_usearch INTERFACE)
+
+target_include_directories(_usearch SYSTEM INTERFACE
+    ${FP16_PROJECT_DIR}/include
+    ${ROBIN_MAP_PROJECT_DIR}/include
+    ${SIMSIMD_PROJECT_DIR}/include
+    ${USEARCH_SOURCE_DIR})
+
+add_library(ch_contrib::usearch ALIAS _usearch)
+target_compile_definitions(_usearch INTERFACE ENABLE_USEARCH)
@@ -1,5 +1,5 @@
 ## ClickHouse Dockerfiles
 
-This directory contain Dockerfiles for `clickhouse-client` and `clickhouse-server`. They are updated in each release.
+This directory contain Dockerfiles for `clickhouse-server`. They are updated in each release.
 
-Also there is bunch of images for testing and CI. They are listed in `images.json` file and updated on each commit to master. If you need to add another image, place information about it into `images.json`.
+Also, there is a bunch of images for testing and CI. They are listed in `images.json` file and updated on each commit to master. If you need to add another image, place information about it into `images.json`.
@@ -1,34 +0,0 @@
-FROM ubuntu:18.04
-
-# ARG for quick switch to a given ubuntu mirror
-ARG apt_archive="http://archive.ubuntu.com"
-RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-
-ARG repository="deb https://repo.clickhouse.com/deb/stable/ main/"
-ARG version=22.1.1.*
-
-RUN apt-get update \
-    && apt-get install --yes --no-install-recommends \
-        apt-transport-https \
-        ca-certificates \
-        dirmngr \
-        gnupg \
-    && mkdir -p /etc/apt/sources.list.d \
-    && apt-key adv --keyserver keyserver.ubuntu.com --recv E0C56BD4 \
-    && echo $repository > /etc/apt/sources.list.d/clickhouse.list \
-    && apt-get update \
-    && env DEBIAN_FRONTEND=noninteractive \
-        apt-get install --allow-unauthenticated --yes --no-install-recommends \
-            clickhouse-client=$version \
-            clickhouse-common-static=$version \
-            locales \
-            tzdata \
-    && rm -rf /var/lib/apt/lists/* /var/cache/debconf \
-    && apt-get clean
-
-RUN locale-gen en_US.UTF-8
-ENV LANG en_US.UTF-8
-ENV LANGUAGE en_US:en
-ENV LC_ALL en_US.UTF-8
-
-ENTRYPOINT ["/usr/bin/clickhouse-client"]
@@ -1,7 +0,0 @@
-# ClickHouse Client Docker Image
-
-For more information see [ClickHouse Server Docker Image](https://hub.docker.com/r/clickhouse/clickhouse-server/).
-
-## License
-
-View [license information](https://github.com/ClickHouse/ClickHouse/blob/master/LICENSE) for the software contained in this image.
@@ -125,6 +125,7 @@
         "docker/test/keeper-jepsen",
         "docker/test/server-jepsen",
         "docker/test/sqllogic",
+        "docker/test/sqltest",
         "docker/test/stateless"
     ]
 },
@@ -155,11 +156,18 @@
 },
 "docker/docs/builder": {
     "name": "clickhouse/docs-builder",
-    "dependent": [
-    ]
+    "dependent": []
 },
 "docker/test/sqllogic": {
     "name": "clickhouse/sqllogic-test",
     "dependent": []
+},
+"docker/test/sqltest": {
+    "name": "clickhouse/sqltest",
+    "dependent": []
+},
+"docker/test/integration/nginx_dav": {
+    "name": "clickhouse/nginx-dav",
+    "dependent": []
 }
 }
@@ -32,7 +32,7 @@ RUN arch=${TARGETARCH:-amd64} \
     esac
 
 ARG REPOSITORY="https://s3.amazonaws.com/clickhouse-builds/22.4/31c367d3cd3aefd316778601ff6565119fe36682/package_release"
-ARG VERSION="23.7.1.2470"
+ARG VERSION="23.7.4.5"
 ARG PACKAGES="clickhouse-keeper"
 
 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -6,7 +6,7 @@ Usage:
 Build deb package with `clang-14` in `debug` mode:
 ```
 $ mkdir deb/test_output
-$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=clang-14 --build-type=debug
+$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=clang-14 --debug-build
 $ ls -l deb/test_output
 -rw-r--r-- 1 root root 3730 clickhouse-client_22.2.2+debug_all.deb
 -rw-r--r-- 1 root root 84221888 clickhouse-common-static_22.2.2+debug_amd64.deb
@@ -58,33 +58,6 @@ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \
     rustup target add aarch64-apple-darwin && \
     rustup target add powerpc64le-unknown-linux-gnu
 
-# Create vendor cache for cargo.
-#
-# Note, that the config.toml for the root is used, you will not be able to
-# install any other crates, except those which had been vendored (since if
-# there is "replace-with" for some source, then cargo will not look to other
-# remotes except this).
-#
-# Notes for the command itself:
-# - --chown is required to preserve the rights
-# - unstable-options for -C
-# - chmod is required to fix the permissions, since builds are running from a different user
-# - copy of the Cargo.lock is required for proper dependencies versions
-# - cargo vendor --sync is requried to overcome [1] bug.
-#
-# [1]: https://github.com/rust-lang/wg-cargo-std-aware/issues/23
-COPY --chown=root:root /rust /rust/packages
-RUN cargo -Z unstable-options -C /rust/packages vendor > $CARGO_HOME/config.toml && \
-    cp "$(rustc --print=sysroot)"/lib/rustlib/src/rust/Cargo.lock "$(rustc --print=sysroot)"/lib/rustlib/src/rust/library/test/ && \
-    cargo -Z unstable-options -C /rust/packages vendor --sync "$(rustc --print=sysroot)"/lib/rustlib/src/rust/library/test/Cargo.toml && \
-    rm "$(rustc --print=sysroot)"/lib/rustlib/src/rust/library/test/Cargo.lock && \
-    sed -i "s#\"vendor\"#\"/rust/vendor\"#" $CARGO_HOME/config.toml && \
-    cat $CARGO_HOME/config.toml && \
-    mv /rust/packages/vendor /rust/vendor && \
-    chmod -R o=r+X /rust/vendor && \
-    ls -R -l /rust/packages && \
-    rm -r /rust/packages
-
 # NOTE: Seems like gcc-11 is too new for ubuntu20 repository
 # A cross-linker for RISC-V 64 (we need it, because LLVM's LLD does not work):
 RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
@@ -107,6 +80,14 @@ RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
 # Download toolchain and SDK for Darwin
 RUN curl -sL -O https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.0.sdk.tar.xz
 
+# Download and install mold 2.0 for s390x build
+RUN curl -Lo /tmp/mold.tar.gz "https://github.com/rui314/mold/releases/download/v2.0.0/mold-2.0.0-x86_64-linux.tar.gz" \
+    && mkdir /tmp/mold \
+    && tar -xzf /tmp/mold.tar.gz -C /tmp/mold \
+    && cp -r /tmp/mold/mold*/* /usr \
+    && rm -rf /tmp/mold \
+    && rm /tmp/mold.tar.gz
+
 # Architecture of the image when BuildKit/buildx is used
 ARG TARGETARCH
 ARG NFPM_VERSION=2.20.0
@@ -1 +0,0 @@
-../../../rust
@@ -22,7 +22,7 @@ def check_image_exists_locally(image_name: str) -> bool:
         output = subprocess.check_output(
             f"docker images -q {image_name} 2> /dev/null", shell=True
         )
-        return output != ""
+        return output != b""
     except subprocess.CalledProcessError:
         return False
 
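The one-character fix above matters because `subprocess.check_output` returns `bytes`, so a comparison against the empty `str` is always true and the function could never report a missing image. A standalone illustration (assumes a POSIX shell where `true` prints nothing):

```python
import subprocess

# check_output returns bytes; "" and b"" are never equal in Python 3.
out = subprocess.check_output("true", shell=True)  # empty output -> b""
print(out != "")   # True even for empty output -- the old bug
print(out != b"")  # False for empty output -- the corrected check
```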
@@ -46,7 +46,7 @@ def build_image(image_name: str, filepath: Path) -> None:
     )
 
 
-def pre_build(repo_path: Path, env_variables: List[str]):
+def pre_build(repo_path: Path, env_variables: List[str]) -> None:
     if "WITH_PERFORMANCE=1" in env_variables:
         current_branch = subprocess.check_output(
             "git branch --show-current", shell=True, encoding="utf-8"
@@ -80,9 +80,12 @@ def run_docker_image_with_env(
     output_dir: Path,
     env_variables: List[str],
     ch_root: Path,
+    cargo_cache_dir: Path,
     ccache_dir: Optional[Path],
-):
+) -> None:
     output_dir.mkdir(parents=True, exist_ok=True)
+    cargo_cache_dir.mkdir(parents=True, exist_ok=True)
+
     env_part = " -e ".join(env_variables)
     if env_part:
         env_part = " -e " + env_part
@@ -104,7 +107,7 @@ def run_docker_image_with_env(
     cmd = (
         f"docker run --network=host --user={user} --rm {ccache_mount}"
        f"--volume={output_dir}:/output --volume={ch_root}:/build {env_part} "
-        f"{interactive} {image_name}"
+        f"--volume={cargo_cache_dir}:/rust/cargo/registry {interactive} {image_name}"
     )
 
     logging.info("Will build ClickHouse pkg with cmd: '%s'", cmd)
@@ -112,12 +115,12 @@ def run_docker_image_with_env(
     subprocess.check_call(cmd, shell=True)
 
 
-def is_release_build(build_type: str, package_type: str, sanitizer: str) -> bool:
-    return build_type == "" and package_type == "deb" and sanitizer == ""
+def is_release_build(debug_build: bool, package_type: str, sanitizer: str) -> bool:
+    return not debug_build and package_type == "deb" and sanitizer == ""
 
 
 def parse_env_variables(
-    build_type: str,
+    debug_build: bool,
     compiler: str,
     sanitizer: str,
     package_type: str,
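Replacing the free-form `build_type` string with a boolean `--debug-build` flag removes an implicit contract (the empty string used to mean "release"). A small sketch of the resulting behaviour, reusing the new signature exactly as it appears in the diff:

```python
def is_release_build(debug_build: bool, package_type: str, sanitizer: str) -> bool:
    return not debug_build and package_type == "deb" and sanitizer == ""

print(is_release_build(False, "deb", ""))         # True: plain deb package is a release
print(is_release_build(True, "deb", ""))          # False: --debug-build was requested
print(is_release_build(False, "deb", "address"))  # False: sanitized build
```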
@@ -129,9 +132,10 @@ def parse_env_variables(
     version: str,
     official: bool,
     additional_pkgs: bool,
+    with_profiler: bool,
     with_coverage: bool,
     with_binaries: str,
-):
+) -> List[str]:
     DARWIN_SUFFIX = "-darwin"
     DARWIN_ARM_SUFFIX = "-darwin-aarch64"
     ARM_SUFFIX = "-aarch64"
@@ -139,6 +143,7 @@ def parse_env_variables(
     FREEBSD_SUFFIX = "-freebsd"
     PPC_SUFFIX = "-ppc64le"
     RISCV_SUFFIX = "-riscv64"
+    S390X_SUFFIX = "-s390x"
     AMD64_COMPAT_SUFFIX = "-amd64-compat"
 
     result = []
@@ -152,6 +157,7 @@ def parse_env_variables(
     is_cross_arm_v80compat = compiler.endswith(ARM_V80COMPAT_SUFFIX)
     is_cross_ppc = compiler.endswith(PPC_SUFFIX)
     is_cross_riscv = compiler.endswith(RISCV_SUFFIX)
+    is_cross_s390x = compiler.endswith(S390X_SUFFIX)
     is_cross_freebsd = compiler.endswith(FREEBSD_SUFFIX)
     is_amd64_compat = compiler.endswith(AMD64_COMPAT_SUFFIX)
 
@@ -213,6 +219,11 @@ def parse_env_variables(
         cmake_flags.append(
             "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-riscv64.cmake"
         )
+    elif is_cross_s390x:
+        cc = compiler[: -len(S390X_SUFFIX)]
+        cmake_flags.append(
+            "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-s390x.cmake"
+        )
     elif is_amd64_compat:
         cc = compiler[: -len(AMD64_COMPAT_SUFFIX)]
         result.append("DEB_ARCH=amd64")
@@ -240,7 +251,7 @@ def parse_env_variables(
         build_target = (
             f"{build_target} clickhouse-odbc-bridge clickhouse-library-bridge"
         )
-        if is_release_build(build_type, package_type, sanitizer):
+        if is_release_build(debug_build, package_type, sanitizer):
             cmake_flags.append("-DSPLIT_DEBUG_SYMBOLS=ON")
             result.append("WITH_PERFORMANCE=1")
             if is_cross_arm:
@@ -255,8 +266,8 @@ def parse_env_variables(
 
     if sanitizer:
         result.append(f"SANITIZER={sanitizer}")
-    if build_type:
-        result.append(f"BUILD_TYPE={build_type.capitalize()}")
+    if debug_build:
+        result.append("BUILD_TYPE=Debug")
     else:
         result.append("BUILD_TYPE=None")
 
@@ -322,6 +333,9 @@ def parse_env_variables(
     # utils are not included into clickhouse-bundle, so build everything
     build_target = "all"
 
+    if with_profiler:
+        cmake_flags.append("-DENABLE_BUILD_PROFILING=1")
+
     if with_coverage:
         cmake_flags.append("-DWITH_COVERAGE=1")
 
@@ -361,7 +375,7 @@ def parse_args() -> argparse.Namespace:
         help="ClickHouse git repository",
     )
     parser.add_argument("--output-dir", type=dir_name, required=True)
-    parser.add_argument("--build-type", choices=("debug", ""), default="")
+    parser.add_argument("--debug-build", action="store_true")
 
     parser.add_argument(
         "--compiler",
@@ -373,6 +387,7 @@ def parse_args() -> argparse.Namespace:
             "clang-16-aarch64-v80compat",
             "clang-16-ppc64le",
             "clang-16-riscv64",
+            "clang-16-s390x",
             "clang-16-amd64-compat",
             "clang-16-freebsd",
         ),
@@ -412,10 +427,18 @@ def parse_args() -> argparse.Namespace:
         action="store_true",
         help="if set, the build fails on errors writing cache to S3",
     )
+    parser.add_argument(
+        "--cargo-cache-dir",
+        default=Path(os.getenv("CARGO_HOME", "") or Path.home() / ".cargo")
+        / "registry",
+        type=dir_name,
+        help="a directory to preserve the rust cargo crates",
+    )
     parser.add_argument("--force-build-image", action="store_true")
     parser.add_argument("--version")
     parser.add_argument("--official", action="store_true")
     parser.add_argument("--additional-pkgs", action="store_true")
+    parser.add_argument("--with-profiler", action="store_true")
     parser.add_argument("--with-coverage", action="store_true")
     parser.add_argument(
         "--with-binaries", choices=("programs", "tests", ""), default=""
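The `--cargo-cache-dir` default added above resolves to `$CARGO_HOME/registry` when `CARGO_HOME` is set and to `~/.cargo/registry` otherwise, which is how the crate cache survives between containerized builds. A standalone check of that exact expression:

```python
import os
from pathlib import Path

cargo_cache = Path(os.getenv("CARGO_HOME", "") or Path.home() / ".cargo") / "registry"
# With CARGO_HOME unset or empty, os.getenv(...) yields "" which is falsy,
# so the `or` branch picks ~/.cargo; either way "registry" is appended.
print(cargo_cache)
```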
@@ -451,7 +474,7 @@ def parse_args() -> argparse.Namespace:
     return args
 
 
-def main():
+def main() -> None:
     logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
     args = parse_args()
 
@@ -467,7 +490,7 @@ def main():
     build_image(image_with_version, dockerfile)
 
     env_prepared = parse_env_variables(
-        args.build_type,
+        args.debug_build,
         args.compiler,
         args.sanitizer,
         args.package_type,
@@ -479,6 +502,7 @@ def main():
         args.version,
         args.official,
         args.additional_pkgs,
+        args.with_profiler,
         args.with_coverage,
         args.with_binaries,
     )
@@ -490,6 +514,7 @@ def main():
         args.output_dir,
         env_prepared,
         ch_root,
+        args.cargo_cache_dir,
         args.ccache_dir,
     )
     logging.info("Output placed into %s", args.output_dir)
@@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
 # lts / testing / prestable / etc
 ARG REPO_CHANNEL="stable"
 ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
-ARG VERSION="23.7.1.2470"
+ARG VERSION="23.7.4.5"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
 
 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -23,7 +23,7 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
 
 ARG REPO_CHANNEL="stable"
 ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
-ARG VERSION="23.7.1.2470"
+ARG VERSION="23.7.4.5"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
 
 # set non-empty deb_location_url url to create a docker image
@@ -19,20 +19,23 @@ RUN apt-get update \
 # and MEMORY_LIMIT_EXCEEDED exceptions in Functional tests (total memory limit in Functional tests is ~55.24 GiB).
 # TSAN will flush shadow memory when reaching this limit.
 # It may cause false-negatives, but it's better than OOM.
-RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'" >> /etc/environment
+RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 abort_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'" >> /etc/environment
 RUN echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment
 RUN echo "MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'" >> /etc/environment
 RUN echo "LSAN_OPTIONS='suppressions=/usr/share/clickhouse-test/config/lsan_suppressions.txt'" >> /etc/environment
 # Sanitizer options for current shell (not current, but the one that will be spawned on "docker run")
 # (but w/o verbosity for TSAN, otherwise test.reference will not match)
-ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
+ENV TSAN_OPTIONS='halt_on_error=1 abort_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
 ENV UBSAN_OPTIONS='print_stacktrace=1'
 ENV MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'
 
 RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen en_US.UTF-8
 ENV LC_ALL en_US.UTF-8
 
-ENV TZ=Europe/Moscow
+ENV TZ=Europe/Amsterdam
 RUN ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime && echo "$TZ" > /etc/timezone
 
+# This script is used to setup realtime export of server logs from the CI into external ClickHouse cluster:
+COPY setup_export_logs.sh /
+
 CMD sleep 1
65 docker/test/base/setup_export_logs.sh Executable file
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+# This script sets up export of system log tables to a remote server.
+# Remote tables are created if not exist, and augmented with extra columns,
+# and their names will contain a hash of the table structure,
+# which allows exporting tables from servers of different versions.
+
+# Pre-configured destination cluster, where to export the data
+CLUSTER=${CLUSTER:=system_logs_export}
+
+EXTRA_COLUMNS=${EXTRA_COLUMNS:="pull_request_number UInt32, commit_sha String, check_start_time DateTime, check_name LowCardinality(String), instance_type LowCardinality(String), "}
+EXTRA_COLUMNS_EXPRESSION=${EXTRA_COLUMNS_EXPRESSION:="0 AS pull_request_number, '' AS commit_sha, now() AS check_start_time, '' AS check_name, '' AS instance_type"}
+EXTRA_ORDER_BY_COLUMNS=${EXTRA_ORDER_BY_COLUMNS:="check_name, "}
+
+CONNECTION_PARAMETERS=${CONNECTION_PARAMETERS:=""}
+
+# Create all configured system logs:
+clickhouse-client --query "SYSTEM FLUSH LOGS"
+
+# It's doesn't make sense to try creating tables if SYNC fails
+echo "SYSTEM SYNC DATABASE REPLICA default" | clickhouse-client --receive_timeout 180 $CONNECTION_PARAMETERS || exit 0
+
+# For each system log table:
+clickhouse-client --query "SHOW TABLES FROM system LIKE '%\\_log'" | while read -r table
+do
+    # Calculate hash of its structure:
+    hash=$(clickhouse-client --query "
+        SELECT sipHash64(groupArray((name, type)))
+        FROM (SELECT name, type FROM system.columns
+            WHERE database = 'system' AND table = '$table'
+            ORDER BY position)
+        ")
+
+    # Create the destination table with adapted name and structure:
+    statement=$(clickhouse-client --format TSVRaw --query "SHOW CREATE TABLE system.${table}" | sed -r -e '
+        s/^\($/('"$EXTRA_COLUMNS"'/;
+        s/ORDER BY \(/ORDER BY ('"$EXTRA_ORDER_BY_COLUMNS"'/;
+        s/^CREATE TABLE system\.\w+_log$/CREATE TABLE IF NOT EXISTS '"$table"'_'"$hash"'/;
+        /^TTL /d
+        ')
+
+    echo "Creating destination table ${table}_${hash}" >&2
+
+    echo "$statement" | clickhouse-client --distributed_ddl_task_timeout=10 $CONNECTION_PARAMETERS || continue
+
+    echo "Creating table system.${table}_sender" >&2
+
+    # Create Distributed table and materialized view to watch on the original table:
+    clickhouse-client --query "
+        CREATE TABLE system.${table}_sender
+        ENGINE = Distributed(${CLUSTER}, default, ${table}_${hash})
+        SETTINGS flush_on_detach=0
+        EMPTY AS
+        SELECT ${EXTRA_COLUMNS_EXPRESSION}, *
+        FROM system.${table}
+    "
+
+    echo "Creating materialized view system.${table}_watcher" >&2
+
+    clickhouse-client --query "
+        CREATE MATERIALIZED VIEW system.${table}_watcher TO system.${table}_sender AS
+        SELECT ${EXTRA_COLUMNS_EXPRESSION}, *
+        FROM system.${table}
+    "
+done
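The script keys each destination table on a hash of the source table's `(name, type)` column list, so CI runs from servers of different versions land in structure-compatible tables instead of colliding. A rough Python illustration of that versioning scheme (here `hashlib` stands in for the `sipHash64` the script actually uses):

```python
import hashlib

def structure_hash(columns: list[tuple[str, str]]) -> str:
    # Any stable digest works for illustration; the shell script computes
    # ClickHouse's sipHash64 over groupArray((name, type)) instead.
    payload = "|".join(f"{name}:{type_}" for name, type_ in columns)
    return hashlib.sha256(payload.encode()).hexdigest()[:16]

v1 = [("event_time", "DateTime"), ("message", "String")]
v2 = v1 + [("thread_id", "UInt64")]  # a newer server adds a column
print(structure_hash(v1) == structure_hash(v2))  # False -> separate tables
```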
@@ -32,7 +32,7 @@ RUN mkdir -p /tmp/clickhouse-odbc-tmp \
     && odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
     && rm -rf /tmp/clickhouse-odbc-tmp
 
-ENV TZ=Europe/Moscow
+ENV TZ=Europe/Amsterdam
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
 
 ENV COMMIT_SHA=''
@@ -148,6 +148,7 @@ function clone_submodules
         contrib/liburing
         contrib/libfiu
         contrib/incbin
+        contrib/yaml-cpp
     )
 
     git submodule sync
@@ -170,6 +171,7 @@ function run_cmake
         "-DENABLE_SIMDJSON=1"
         "-DENABLE_JEMALLOC=1"
         "-DENABLE_LIBURING=1"
+        "-DENABLE_YAML_CPP=1"
     )
 
     export CCACHE_DIR="$FASTTEST_WORKSPACE/ccache"
@@ -8,7 +8,7 @@ ARG apt_archive="http://archive.ubuntu.com"
 RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 ENV LANG=C.UTF-8
-ENV TZ=Europe/Moscow
+ENV TZ=Europe/Amsterdam
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
 
 RUN apt-get update \
@@ -122,6 +122,23 @@ EOL
     <core_path>$PWD</core_path>
 </clickhouse>
 EOL
+
+    # Setup a cluster for logs export to ClickHouse Cloud
+    # Note: these variables are provided to the Docker run command by the Python script in tests/ci
+    if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
+    then
+        echo "
+remote_servers:
+    system_logs_export:
+        shard:
+            replica:
+                secure: 1
+                user: ci
+                host: '${CLICKHOUSE_CI_LOGS_HOST}'
+                port: 9440
+                password: '${CLICKHOUSE_CI_LOGS_PASSWORD}'
+" > db/config.d/system_logs_export.yaml
+    fi
 }
 
 function filter_exists_and_template
@@ -223,7 +240,22 @@ quit
     done
     clickhouse-client --query "select 1" # This checks that the server is responding
     kill -0 $server_pid # This checks that it is our server that is started and not some other one
-    echo Server started and responded
+    echo 'Server started and responded'
+
+    # Initialize export of system logs to ClickHouse Cloud
+    if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
+    then
+        export EXTRA_COLUMNS_EXPRESSION="$PR_TO_TEST AS pull_request_number, '$SHA_TO_TEST' AS commit_sha, '$CHECK_START_TIME' AS check_start_time, '$CHECK_NAME' AS check_name, '$INSTANCE_TYPE' AS instance_type"
+        # TODO: Check if the password will appear in the logs.
+        export CONNECTION_PARAMETERS="--secure --user ci --host ${CLICKHOUSE_CI_LOGS_HOST} --password ${CLICKHOUSE_CI_LOGS_PASSWORD}"
+
+        /setup_export_logs.sh
+
+        # Unset variables after use
+        export CONNECTION_PARAMETERS=''
+        export CLICKHOUSE_CI_LOGS_HOST=''
+        export CLICKHOUSE_CI_LOGS_PASSWORD=''
+    fi
 
 # SC2012: Use find instead of ls to better handle non-alphanumeric filenames. They are all alphanumeric.
 # SC2046: Quote this to prevent word splitting. Actually I need word splitting.
@@ -12,6 +12,7 @@ ENV \
 # install systemd packages
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
+        sudo \
         systemd \
     && \
     apt-get clean && \
6 docker/test/integration/nginx_dav/Dockerfile Normal file
@@ -0,0 +1,6 @@
+FROM nginx:alpine-slim
+
+COPY default.conf /etc/nginx/conf.d/
+
+RUN mkdir /usr/share/nginx/files/ \
+    && chown nginx: /usr/share/nginx/files/ -R
docker/test/integration/nginx_dav/default.conf (new file, 25 lines)
@@ -0,0 +1,25 @@
+server {
+    listen 80;
+
+    #root /usr/share/nginx/test.com;
+    index index.html index.htm;
+
+    server_name test.com localhost;
+
+    location / {
+        expires max;
+        root /usr/share/nginx/files;
+        client_max_body_size 20m;
+        client_body_temp_path /usr/share/nginx/tmp;
+        dav_methods PUT; # Allowed methods, only PUT is necessary
+
+        create_full_put_path on; # nginx automatically creates nested directories
+        dav_access user:rw group:r all:r; # access permissions for files
+
+        limit_except GET {
+            allow all;
+        }
+    }
+
+    error_page 405 =200 $uri;
+}
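This config turns the container into a small WebDAV endpoint for tests: PUT uploads land under /usr/share/nginx/files, and create_full_put_path lets nested paths be created on the fly. A minimal sketch (assuming the requests package and the container's port 80 published on localhost — both assumptions about a local run) of exercising it:

import requests

# Upload into a nested directory; nginx creates the path because
# create_full_put_path is on. Host/port are local-run assumptions.
resp = requests.put(
    "http://localhost:80/some/nested/dir/hello.txt",
    data=b"hello from the integration test",
)
assert resp.status_code in (201, 204)  # 201 created, 204 overwritten

# Reading it back goes through the plain GET location.
print(requests.get("http://localhost:80/some/nested/dir/hello.txt").text)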
@@ -95,6 +95,7 @@ RUN python3 -m pip install --no-cache-dir \
         pytest-timeout \
         pytest-xdist \
         pytz \
+        pyyaml==5.3.1 \
         redis \
         requests-kerberos \
         tzlocal==2.1 \
@@ -129,7 +130,7 @@ COPY misc/ /misc/
 
 # Same options as in test/base/Dockerfile
 # (in case you need to override them in tests)
-ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
+ENV TSAN_OPTIONS='halt_on_error=1 abort_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
 ENV UBSAN_OPTIONS='print_stacktrace=1'
 ENV MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'
 
@@ -2,7 +2,7 @@ version: "2.3"
 
 services:
     coredns:
-        image: coredns/coredns:latest
+        image: coredns/coredns:1.9.3 # :latest broke this test
         restart: always
         volumes:
             - ${COREDNS_CONFIG_DIR}/example.com:/example.com
@@ -12,3 +12,5 @@ services:
             - type: ${HDFS_FS:-tmpfs}
               source: ${HDFS_LOGS:-}
               target: /usr/local/hadoop/logs
+        sysctls:
+            net.ipv4.ip_local_port_range: '55000 65535'
@@ -31,6 +31,8 @@ services:
             - kafka_zookeeper
         security_opt:
             - label:disable
+        sysctls:
+            net.ipv4.ip_local_port_range: '55000 65535'
 
     schema-registry:
         image: confluentinc/cp-schema-registry:5.2.0
@@ -20,6 +20,9 @@ services:
             - type: ${keeper_fs:-tmpfs}
               source: ${keeper_db_dir1:-}
               target: /var/lib/clickhouse-keeper
+            - type: ${keeper_fs:-tmpfs}
+              source: ${keeper_db_dir1:-}
+              target: /var/lib/clickhouse
         entrypoint: "${keeper_cmd_prefix:-clickhouse keeper} --config=/etc/clickhouse-keeper/keeper_config1.xml --log-file=/var/log/clickhouse-keeper/clickhouse-keeper.log --errorlog-file=/var/log/clickhouse-keeper/clickhouse-keeper.err.log"
         cap_add:
             - SYS_PTRACE
@@ -53,6 +56,9 @@ services:
             - type: ${keeper_fs:-tmpfs}
               source: ${keeper_db_dir2:-}
               target: /var/lib/clickhouse-keeper
+            - type: ${keeper_fs:-tmpfs}
+              source: ${keeper_db_dir1:-}
+              target: /var/lib/clickhouse
         entrypoint: "${keeper_cmd_prefix:-clickhouse keeper} --config=/etc/clickhouse-keeper/keeper_config2.xml --log-file=/var/log/clickhouse-keeper/clickhouse-keeper.log --errorlog-file=/var/log/clickhouse-keeper/clickhouse-keeper.err.log"
         cap_add:
             - SYS_PTRACE
@@ -86,6 +92,9 @@ services:
             - type: ${keeper_fs:-tmpfs}
               source: ${keeper_db_dir3:-}
               target: /var/lib/clickhouse-keeper
+            - type: ${keeper_fs:-tmpfs}
+              source: ${keeper_db_dir1:-}
+              target: /var/lib/clickhouse
         entrypoint: "${keeper_cmd_prefix:-clickhouse keeper} --config=/etc/clickhouse-keeper/keeper_config3.xml --log-file=/var/log/clickhouse-keeper/clickhouse-keeper.log --errorlog-file=/var/log/clickhouse-keeper/clickhouse-keeper.err.log"
         cap_add:
             - SYS_PTRACE
@@ -20,6 +20,8 @@ services:
         depends_on:
             - hdfskerberos
         entrypoint: /etc/bootstrap.sh -d
+        sysctls:
+            net.ipv4.ip_local_port_range: '55000 65535'
 
     hdfskerberos:
         image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest}
@@ -29,3 +31,5 @@ services:
             - ${KERBERIZED_HDFS_DIR}/../../kerberos_image_config.sh:/config.sh
             - /dev/urandom:/dev/random
         expose: [88, 749]
+        sysctls:
+            net.ipv4.ip_local_port_range: '55000 65535'
@@ -48,6 +48,8 @@ services:
             - kafka_kerberos
         security_opt:
             - label:disable
+        sysctls:
+            net.ipv4.ip_local_port_range: '55000 65535'
 
     kafka_kerberos:
         image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest}
@@ -13,4 +13,3 @@ services:
         - ${MEILI_SECURE_EXTERNAL_PORT:-7700}:${MEILI_SECURE_INTERNAL_PORT:-7700}
     environment:
         MEILI_MASTER_KEY: "password"
-
@@ -14,7 +14,7 @@ services:
         MINIO_ACCESS_KEY: minio
         MINIO_SECRET_KEY: minio123
         MINIO_PROMETHEUS_AUTH_TYPE: public
-    command: server --address :9001 --certs-dir /certs /data1-1
+    command: server --console-address 127.0.0.1:19001 --address :9001 --certs-dir /certs /data1-1
     depends_on:
         - proxy1
         - proxy2
@@ -5,7 +5,7 @@ services:
 # Files will be put into /usr/share/nginx/files.
 
     nginx:
-        image: kssenii/nginx-test:1.1
+        image: clickhouse/nginx-dav:${DOCKER_NGINX_DAV_TAG:-latest}
         restart: always
         ports:
             - 80:80
@@ -64,15 +64,16 @@ export CLICKHOUSE_ODBC_BRIDGE_BINARY_PATH=/clickhouse-odbc-bridge
 export CLICKHOUSE_LIBRARY_BRIDGE_BINARY_PATH=/clickhouse-library-bridge
 
 export DOCKER_BASE_TAG=${DOCKER_BASE_TAG:=latest}
-export DOCKER_HELPER_TAG=${DOCKER_HELPER_TAG:=latest}
-export DOCKER_MYSQL_GOLANG_CLIENT_TAG=${DOCKER_MYSQL_GOLANG_CLIENT_TAG:=latest}
 export DOCKER_DOTNET_CLIENT_TAG=${DOCKER_DOTNET_CLIENT_TAG:=latest}
+export DOCKER_HELPER_TAG=${DOCKER_HELPER_TAG:=latest}
+export DOCKER_KERBERIZED_HADOOP_TAG=${DOCKER_KERBERIZED_HADOOP_TAG:=latest}
+export DOCKER_KERBEROS_KDC_TAG=${DOCKER_KERBEROS_KDC_TAG:=latest}
+export DOCKER_MYSQL_GOLANG_CLIENT_TAG=${DOCKER_MYSQL_GOLANG_CLIENT_TAG:=latest}
 export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
 export DOCKER_MYSQL_JS_CLIENT_TAG=${DOCKER_MYSQL_JS_CLIENT_TAG:=latest}
 export DOCKER_MYSQL_PHP_CLIENT_TAG=${DOCKER_MYSQL_PHP_CLIENT_TAG:=latest}
+export DOCKER_NGINX_DAV_TAG=${DOCKER_NGINX_DAV_TAG:=latest}
 export DOCKER_POSTGRESQL_JAVA_CLIENT_TAG=${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:=latest}
-export DOCKER_KERBEROS_KDC_TAG=${DOCKER_KERBEROS_KDC_TAG:=latest}
-export DOCKER_KERBERIZED_HADOOP_TAG=${DOCKER_KERBERIZED_HADOOP_TAG:=latest}
 
 cd /ClickHouse/tests/integration
 exec "$@"
@@ -1,18 +1,7 @@
 # docker build -t clickhouse/performance-comparison .
 
-# Using ubuntu:22.04 over 20.04 as all other images, since:
-# a) ubuntu 20.04 has too old parallel, and does not support --memsuspend
-# b) anyway for perf tests it should not be important (backward compatiblity
-# with older ubuntu had been checked lots of times in various tests)
-FROM ubuntu:22.04
+ARG FROM_TAG=latest
+FROM clickhouse/test-base:$FROM_TAG
 
-# ARG for quick switch to a given ubuntu mirror
-ARG apt_archive="http://archive.ubuntu.com"
-RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-
-ENV LANG=C.UTF-8
-ENV TZ=Europe/Moscow
-RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
-
 RUN apt-get update \
     && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \
@@ -56,10 +45,9 @@ COPY * /
 # node #0 should be less stable because of system interruptions. We bind
 # randomly to node 1 or 0 to gather some statistics on that. We have to bind
 # both servers and the tmpfs on which the database is stored. How to do it
-# through Yandex Sandbox API is unclear, but by default tmpfs uses
+# is unclear, but by default tmpfs uses
 # 'process allocation policy', not sure which process but hopefully the one that
-# writes to it, so just bind the downloader script as well. We could also try to
-# remount it with proper options in Sandbox task.
+# writes to it, so just bind the downloader script as well.
 # https://www.kernel.org/doc/Documentation/filesystems/tmpfs.txt
 # Double-escaped backslashes are a tribute to the engineering wonder of docker --
 # it gives '/bin/sh: 1: [bash,: not found' otherwise.
@@ -90,7 +90,7 @@ function configure
     set +m
 
     wait_for_server $LEFT_SERVER_PORT $left_pid
-    echo Server for setup started
+    echo "Server for setup started"
 
     clickhouse-client --port $LEFT_SERVER_PORT --query "create database test" ||:
     clickhouse-client --port $LEFT_SERVER_PORT --query "rename table datasets.hits_v1 to test.hits" ||:
@@ -156,9 +156,9 @@ function restart
     wait_for_server $RIGHT_SERVER_PORT $right_pid
     echo right ok
 
-    clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.tables where database != 'system'"
+    clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.tables where database NOT IN ('system', 'INFORMATION_SCHEMA', 'information_schema')"
     clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.build_options"
-    clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.tables where database != 'system'"
+    clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.tables where database NOT IN ('system', 'INFORMATION_SCHEMA', 'information_schema')"
     clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.build_options"
 
     # Check again that both servers we started are running -- this is important
@ -352,14 +352,12 @@ function get_profiles
|
|||||||
wait
|
wait
|
||||||
|
|
||||||
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.query_log where type in ('QueryFinish', 'ExceptionWhileProcessing') format TSVWithNamesAndTypes" > left-query-log.tsv ||: &
|
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.query_log where type in ('QueryFinish', 'ExceptionWhileProcessing') format TSVWithNamesAndTypes" > left-query-log.tsv ||: &
|
||||||
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.query_thread_log format TSVWithNamesAndTypes" > left-query-thread-log.tsv ||: &
|
|
||||||
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.trace_log format TSVWithNamesAndTypes" > left-trace-log.tsv ||: &
|
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.trace_log format TSVWithNamesAndTypes" > left-trace-log.tsv ||: &
|
||||||
clickhouse-client --port $LEFT_SERVER_PORT --query "select arrayJoin(trace) addr, concat(splitByChar('/', addressToLine(addr))[-1], '#', demangle(addressToSymbol(addr)) ) name from system.trace_log group by addr format TSVWithNamesAndTypes" > left-addresses.tsv ||: &
|
clickhouse-client --port $LEFT_SERVER_PORT --query "select arrayJoin(trace) addr, concat(splitByChar('/', addressToLine(addr))[-1], '#', demangle(addressToSymbol(addr)) ) name from system.trace_log group by addr format TSVWithNamesAndTypes" > left-addresses.tsv ||: &
|
||||||
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.metric_log format TSVWithNamesAndTypes" > left-metric-log.tsv ||: &
|
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.metric_log format TSVWithNamesAndTypes" > left-metric-log.tsv ||: &
|
||||||
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.asynchronous_metric_log format TSVWithNamesAndTypes" > left-async-metric-log.tsv ||: &
|
clickhouse-client --port $LEFT_SERVER_PORT --query "select * from system.asynchronous_metric_log format TSVWithNamesAndTypes" > left-async-metric-log.tsv ||: &
|
||||||
|
|
||||||
clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.query_log where type in ('QueryFinish', 'ExceptionWhileProcessing') format TSVWithNamesAndTypes" > right-query-log.tsv ||: &
|
clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.query_log where type in ('QueryFinish', 'ExceptionWhileProcessing') format TSVWithNamesAndTypes" > right-query-log.tsv ||: &
|
||||||
clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.query_thread_log format TSVWithNamesAndTypes" > right-query-thread-log.tsv ||: &
|
|
||||||
clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.trace_log format TSVWithNamesAndTypes" > right-trace-log.tsv ||: &
|
clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.trace_log format TSVWithNamesAndTypes" > right-trace-log.tsv ||: &
|
||||||
clickhouse-client --port $RIGHT_SERVER_PORT --query "select arrayJoin(trace) addr, concat(splitByChar('/', addressToLine(addr))[-1], '#', demangle(addressToSymbol(addr)) ) name from system.trace_log group by addr format TSVWithNamesAndTypes" > right-addresses.tsv ||: &
|
clickhouse-client --port $RIGHT_SERVER_PORT --query "select arrayJoin(trace) addr, concat(splitByChar('/', addressToLine(addr))[-1], '#', demangle(addressToSymbol(addr)) ) name from system.trace_log group by addr format TSVWithNamesAndTypes" > right-addresses.tsv ||: &
|
||||||
clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.metric_log format TSVWithNamesAndTypes" > right-metric-log.tsv ||: &
|
clickhouse-client --port $RIGHT_SERVER_PORT --query "select * from system.metric_log format TSVWithNamesAndTypes" > right-metric-log.tsv ||: &
|
||||||
@ -665,9 +663,8 @@ create view partial_query_times as select * from
|
|||||||
-- Report for backward-incompatible ('partial') queries that we could only run on the new server (e.g.
|
-- Report for backward-incompatible ('partial') queries that we could only run on the new server (e.g.
|
||||||
-- queries with new functions added in the tested PR).
|
-- queries with new functions added in the tested PR).
|
||||||
create table partial_queries_report engine File(TSV, 'report/partial-queries-report.tsv')
|
create table partial_queries_report engine File(TSV, 'report/partial-queries-report.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
as select round(time_median, 3) time,
|
||||||
as select toDecimal64(time_median, 3) time,
|
round(time_stddev / time_median, 3) relative_time_stddev,
|
||||||
toDecimal64(time_stddev / time_median, 3) relative_time_stddev,
|
|
||||||
test, query_index, query_display_name
|
test, query_index, query_display_name
|
||||||
from partial_query_times
|
from partial_query_times
|
||||||
join query_display_names using (test, query_index)
|
join query_display_names using (test, query_index)
|
||||||
@ -739,28 +736,26 @@ create table queries engine File(TSVWithNamesAndTypes, 'report/queries.tsv')
|
|||||||
;
|
;
|
||||||
|
|
||||||
create table changed_perf_report engine File(TSV, 'report/changed-perf.tsv')
|
create table changed_perf_report engine File(TSV, 'report/changed-perf.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
|
||||||
as with
|
as with
|
||||||
-- server_time is sometimes reported as zero (if it's less than 1 ms),
|
-- server_time is sometimes reported as zero (if it's less than 1 ms),
|
||||||
-- so we have to work around this to not get an error about conversion
|
-- so we have to work around this to not get an error about conversion
|
||||||
-- of NaN to decimal.
|
-- of NaN to decimal.
|
||||||
(left > right ? left / right : right / left) as times_change_float,
|
(left > right ? left / right : right / left) as times_change_float,
|
||||||
isFinite(times_change_float) as times_change_finite,
|
isFinite(times_change_float) as times_change_finite,
|
||||||
toDecimal64(times_change_finite ? times_change_float : 1., 3) as times_change_decimal,
|
round(times_change_finite ? times_change_float : 1., 3) as times_change_decimal,
|
||||||
times_change_finite
|
times_change_finite
|
||||||
? (left > right ? '-' : '+') || toString(times_change_decimal) || 'x'
|
? (left > right ? '-' : '+') || toString(times_change_decimal) || 'x'
|
||||||
: '--' as times_change_str
|
: '--' as times_change_str
|
||||||
select
|
select
|
||||||
toDecimal64(left, 3), toDecimal64(right, 3), times_change_str,
|
round(left, 3), round(right, 3), times_change_str,
|
||||||
toDecimal64(diff, 3), toDecimal64(stat_threshold, 3),
|
round(diff, 3), round(stat_threshold, 3),
|
||||||
changed_fail, test, query_index, query_display_name
|
changed_fail, test, query_index, query_display_name
|
||||||
from queries where changed_show order by abs(diff) desc;
|
from queries where changed_show order by abs(diff) desc;
|
||||||
|
|
||||||
create table unstable_queries_report engine File(TSV, 'report/unstable-queries.tsv')
|
create table unstable_queries_report engine File(TSV, 'report/unstable-queries.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
|
||||||
as select
|
as select
|
||||||
toDecimal64(left, 3), toDecimal64(right, 3), toDecimal64(diff, 3),
|
round(left, 3), round(right, 3), round(diff, 3),
|
||||||
toDecimal64(stat_threshold, 3), unstable_fail, test, query_index, query_display_name
|
round(stat_threshold, 3), unstable_fail, test, query_index, query_display_name
|
||||||
from queries where unstable_show order by stat_threshold desc;
|
from queries where unstable_show order by stat_threshold desc;
|
||||||
|
|
||||||
|
|
||||||
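The switch from toDecimal64(x, 3) to round(x, 3) across these report tables drops the fixed-point Decimal rendering (and the output_format_decimal_trailing_zeros setting that padded it) in favor of plain floats. A small Python sketch of the times-change string the SQL above builds, to make the formatting concrete (values are made up):

import math

def times_change_str(left: float, right: float) -> str:
    # Mirrors the SQL above: ratio of the slower to the faster side,
    # '-' when the new server is faster, '+' when it is slower,
    # '--' when the ratio is not finite (e.g. a zero timing).
    ratio = left / right if left > right else right / left
    if not math.isfinite(ratio):
        return "--"
    sign = "-" if left > right else "+"
    return f"{sign}{round(ratio, 3)}x"

print(times_change_str(0.120, 0.095))  # '-1.263x' (sample values)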
@ -789,11 +784,10 @@ create view total_speedup as
|
|||||||
;
|
;
|
||||||
|
|
||||||
create table test_perf_changes_report engine File(TSV, 'report/test-perf-changes.tsv')
|
create table test_perf_changes_report engine File(TSV, 'report/test-perf-changes.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
|
||||||
as with
|
as with
|
||||||
(times_speedup >= 1
|
(times_speedup >= 1
|
||||||
? '-' || toString(toDecimal64(times_speedup, 3)) || 'x'
|
? '-' || toString(round(times_speedup, 3)) || 'x'
|
||||||
: '+' || toString(toDecimal64(1 / times_speedup, 3)) || 'x')
|
: '+' || toString(round(1 / times_speedup, 3)) || 'x')
|
||||||
as times_speedup_str
|
as times_speedup_str
|
||||||
select test, times_speedup_str, queries, bad, changed, unstable
|
select test, times_speedup_str, queries, bad, changed, unstable
|
||||||
-- Not sure what's the precedence of UNION ALL vs WHERE & ORDER BY, hence all
|
-- Not sure what's the precedence of UNION ALL vs WHERE & ORDER BY, hence all
|
||||||
@ -817,11 +811,10 @@ create view total_client_time_per_query as select *
|
|||||||
'test text, query_index int, client float, server float');
|
'test text, query_index int, client float, server float');
|
||||||
|
|
||||||
create table slow_on_client_report engine File(TSV, 'report/slow-on-client.tsv')
|
create table slow_on_client_report engine File(TSV, 'report/slow-on-client.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
as select client, server, round(client/server, 3) p,
|
||||||
as select client, server, toDecimal64(client/server, 3) p,
|
|
||||||
test, query_display_name
|
test, query_display_name
|
||||||
from total_client_time_per_query left join query_display_names using (test, query_index)
|
from total_client_time_per_query left join query_display_names using (test, query_index)
|
||||||
where p > toDecimal64(1.02, 3) order by p desc;
|
where p > round(1.02, 3) order by p desc;
|
||||||
|
|
||||||
create table wall_clock_time_per_test engine Memory as select *
|
create table wall_clock_time_per_test engine Memory as select *
|
||||||
from file('wall-clock-times.tsv', TSV, 'test text, real float, user float, system float');
|
from file('wall-clock-times.tsv', TSV, 'test text, real float, user float, system float');
|
||||||
@ -899,15 +892,14 @@ create view test_times_view_total as
|
|||||||
;
|
;
|
||||||
|
|
||||||
create table test_times_report engine File(TSV, 'report/test-times.tsv')
|
create table test_times_report engine File(TSV, 'report/test-times.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
|
||||||
as select
|
as select
|
||||||
test,
|
test,
|
||||||
toDecimal64(real, 3),
|
round(real, 3),
|
||||||
toDecimal64(total_client_time, 3),
|
round(total_client_time, 3),
|
||||||
queries,
|
queries,
|
||||||
toDecimal64(query_max, 3),
|
round(query_max, 3),
|
||||||
toDecimal64(avg_real_per_query, 3),
|
round(avg_real_per_query, 3),
|
||||||
toDecimal64(query_min, 3),
|
round(query_min, 3),
|
||||||
runs
|
runs
|
||||||
from (
|
from (
|
||||||
select * from test_times_view
|
select * from test_times_view
|
||||||
@ -919,21 +911,20 @@ create table test_times_report engine File(TSV, 'report/test-times.tsv')
|
|||||||
|
|
||||||
-- report for all queries page, only main metric
|
-- report for all queries page, only main metric
|
||||||
create table all_tests_report engine File(TSV, 'report/all-queries.tsv')
|
create table all_tests_report engine File(TSV, 'report/all-queries.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
|
||||||
as with
|
as with
|
||||||
-- server_time is sometimes reported as zero (if it's less than 1 ms),
|
-- server_time is sometimes reported as zero (if it's less than 1 ms),
|
||||||
-- so we have to work around this to not get an error about conversion
|
-- so we have to work around this to not get an error about conversion
|
||||||
-- of NaN to decimal.
|
-- of NaN to decimal.
|
||||||
(left > right ? left / right : right / left) as times_change_float,
|
(left > right ? left / right : right / left) as times_change_float,
|
||||||
isFinite(times_change_float) as times_change_finite,
|
isFinite(times_change_float) as times_change_finite,
|
||||||
toDecimal64(times_change_finite ? times_change_float : 1., 3) as times_change_decimal,
|
round(times_change_finite ? times_change_float : 1., 3) as times_change_decimal,
|
||||||
times_change_finite
|
times_change_finite
|
||||||
? (left > right ? '-' : '+') || toString(times_change_decimal) || 'x'
|
? (left > right ? '-' : '+') || toString(times_change_decimal) || 'x'
|
||||||
: '--' as times_change_str
|
: '--' as times_change_str
|
||||||
select changed_fail, unstable_fail,
|
select changed_fail, unstable_fail,
|
||||||
toDecimal64(left, 3), toDecimal64(right, 3), times_change_str,
|
round(left, 3), round(right, 3), times_change_str,
|
||||||
toDecimal64(isFinite(diff) ? diff : 0, 3),
|
round(isFinite(diff) ? diff : 0, 3),
|
||||||
toDecimal64(isFinite(stat_threshold) ? stat_threshold : 0, 3),
|
round(isFinite(stat_threshold) ? stat_threshold : 0, 3),
|
||||||
test, query_index, query_display_name
|
test, query_index, query_display_name
|
||||||
from queries order by test, query_index;
|
from queries order by test, query_index;
|
||||||
|
|
||||||
@ -1044,27 +1035,6 @@ create table unstable_run_traces engine File(TSVWithNamesAndTypes,
|
|||||||
order by count() desc
|
order by count() desc
|
||||||
;
|
;
|
||||||
|
|
||||||
create table metric_devation engine File(TSVWithNamesAndTypes,
|
|
||||||
'report/metric-deviation.$version.tsv')
|
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
|
||||||
-- first goes the key used to split the file with grep
|
|
||||||
as select test, query_index, query_display_name,
|
|
||||||
toDecimal64(d, 3) d, q, metric
|
|
||||||
from (
|
|
||||||
select
|
|
||||||
test, query_index,
|
|
||||||
(q[3] - q[1])/q[2] d,
|
|
||||||
quantilesExact(0, 0.5, 1)(value) q, metric
|
|
||||||
from (select * from unstable_run_metrics
|
|
||||||
union all select * from unstable_run_traces
|
|
||||||
union all select * from unstable_run_metrics_2) mm
|
|
||||||
group by test, query_index, metric
|
|
||||||
having isFinite(d) and d > 0.5 and q[3] > 5
|
|
||||||
) metrics
|
|
||||||
left join query_display_names using (test, query_index)
|
|
||||||
order by test, query_index, d desc
|
|
||||||
;
|
|
||||||
|
|
||||||
create table stacks engine File(TSV, 'report/stacks.$version.tsv') as
|
create table stacks engine File(TSV, 'report/stacks.$version.tsv') as
|
||||||
select
|
select
|
||||||
-- first goes the key used to split the file with grep
|
-- first goes the key used to split the file with grep
|
||||||
@ -1173,9 +1143,8 @@ create table metrics engine File(TSV, 'metrics/metrics.tsv') as
|
|||||||
|
|
||||||
-- Show metrics that have changed
|
-- Show metrics that have changed
|
||||||
create table changes engine File(TSV, 'metrics/changes.tsv')
|
create table changes engine File(TSV, 'metrics/changes.tsv')
|
||||||
settings output_format_decimal_trailing_zeros = 1
|
|
||||||
as select metric, left, right,
|
as select metric, left, right,
|
||||||
toDecimal64(diff, 3), toDecimal64(times_diff, 3)
|
round(diff, 3), round(times_diff, 3)
|
||||||
from (
|
from (
|
||||||
select metric, median(left) as left, median(right) as right,
|
select metric, median(left) as left, median(right) as right,
|
||||||
(right - left) / left diff,
|
(right - left) / left diff,
|
||||||
@ -1226,7 +1195,6 @@ create table ci_checks engine File(TSVWithNamesAndTypes, 'ci-checks.tsv')
|
|||||||
'$SHA_TO_TEST' :: LowCardinality(String) AS commit_sha,
|
'$SHA_TO_TEST' :: LowCardinality(String) AS commit_sha,
|
||||||
'${CLICKHOUSE_PERFORMANCE_COMPARISON_CHECK_NAME:-Performance}' :: LowCardinality(String) AS check_name,
|
'${CLICKHOUSE_PERFORMANCE_COMPARISON_CHECK_NAME:-Performance}' :: LowCardinality(String) AS check_name,
|
||||||
'$(sed -n 's/.*<!--status: \(.*\)-->/\1/p' report.html)' :: LowCardinality(String) AS check_status,
|
'$(sed -n 's/.*<!--status: \(.*\)-->/\1/p' report.html)' :: LowCardinality(String) AS check_status,
|
||||||
-- TODO toDateTime() can't parse output of 'date', so no time for now.
|
|
||||||
(($(date +%s) - $CHPC_CHECK_START_TIMESTAMP) * 1000) :: UInt64 AS check_duration_ms,
|
(($(date +%s) - $CHPC_CHECK_START_TIMESTAMP) * 1000) :: UInt64 AS check_duration_ms,
|
||||||
fromUnixTimestamp($CHPC_CHECK_START_TIMESTAMP) check_start_time,
|
fromUnixTimestamp($CHPC_CHECK_START_TIMESTAMP) check_start_time,
|
||||||
test_name :: LowCardinality(String) AS test_name ,
|
test_name :: LowCardinality(String) AS test_name ,
|
||||||
|
@ -19,31 +19,6 @@
|
|||||||
<opentelemetry_span_log remove="remove"/>
|
<opentelemetry_span_log remove="remove"/>
|
||||||
<session_log remove="remove"/>
|
<session_log remove="remove"/>
|
||||||
|
|
||||||
<!-- performance tests does not uses real block devices,
|
|
||||||
instead they stores everything in memory.
|
|
||||||
|
|
||||||
And so, to avoid extra memory reference switch *_log to Memory engine. -->
|
|
||||||
<query_log>
|
|
||||||
<engine>ENGINE = Memory</engine>
|
|
||||||
<partition_by remove="remove"/>
|
|
||||||
</query_log>
|
|
||||||
<query_thread_log>
|
|
||||||
<engine>ENGINE = Memory</engine>
|
|
||||||
<partition_by remove="remove"/>
|
|
||||||
</query_thread_log>
|
|
||||||
<trace_log>
|
|
||||||
<engine>ENGINE = Memory</engine>
|
|
||||||
<partition_by remove="remove"/>
|
|
||||||
</trace_log>
|
|
||||||
<metric_log>
|
|
||||||
<engine>ENGINE = Memory</engine>
|
|
||||||
<partition_by remove="remove"/>
|
|
||||||
</metric_log>
|
|
||||||
<asynchronous_metric_log>
|
|
||||||
<engine>ENGINE = Memory</engine>
|
|
||||||
<partition_by remove="remove"/>
|
|
||||||
</asynchronous_metric_log>
|
|
||||||
|
|
||||||
<uncompressed_cache_size>1000000000</uncompressed_cache_size>
|
<uncompressed_cache_size>1000000000</uncompressed_cache_size>
|
||||||
|
|
||||||
<asynchronous_metrics_update_period_s>10</asynchronous_metrics_update_period_s>
|
<asynchronous_metrics_update_period_s>10</asynchronous_metrics_update_period_s>
|
||||||
|
@ -3,7 +3,7 @@
|
|||||||
<default>
|
<default>
|
||||||
<allow_introspection_functions>1</allow_introspection_functions>
|
<allow_introspection_functions>1</allow_introspection_functions>
|
||||||
<log_queries>1</log_queries>
|
<log_queries>1</log_queries>
|
||||||
<metrics_perf_events_enabled>1</metrics_perf_events_enabled>
|
<metrics_perf_events_enabled>0</metrics_perf_events_enabled>
|
||||||
<!--
|
<!--
|
||||||
If a test takes too long by mistake, the entire test task can
|
If a test takes too long by mistake, the entire test task can
|
||||||
time out and the author won't get a proper message. Put some cap
|
time out and the author won't get a proper message. Put some cap
|
||||||
@ -21,6 +21,7 @@
|
|||||||
<!-- disable JIT for perf tests -->
|
<!-- disable JIT for perf tests -->
|
||||||
<compile_expressions>0</compile_expressions>
|
<compile_expressions>0</compile_expressions>
|
||||||
<compile_aggregate_expressions>0</compile_aggregate_expressions>
|
<compile_aggregate_expressions>0</compile_aggregate_expressions>
|
||||||
|
<compile_sort_description>0</compile_sort_description>
|
||||||
|
|
||||||
<!-- Don't fail some prewarm queries too early -->
|
<!-- Don't fail some prewarm queries too early -->
|
||||||
<timeout_before_checking_execution_speed>60</timeout_before_checking_execution_speed>
|
<timeout_before_checking_execution_speed>60</timeout_before_checking_execution_speed>
|
||||||
|
@ -31,8 +31,6 @@ function download
|
|||||||
# Test all of them.
|
# Test all of them.
|
||||||
declare -a urls_to_try=(
|
declare -a urls_to_try=(
|
||||||
"$S3_URL/PRs/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
|
"$S3_URL/PRs/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
|
||||||
"$S3_URL/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
|
|
||||||
"$S3_URL/$left_pr/$left_sha/$BUILD_NAME/performance.tgz"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
for path in "${urls_to_try[@]}"
|
for path in "${urls_to_try[@]}"
|
||||||
|
@ -130,7 +130,7 @@ then
|
|||||||
git -C right/ch diff --name-only "$base" pr -- :!tests/performance :!docker/test/performance-comparison | tee other-changed-files.txt
|
git -C right/ch diff --name-only "$base" pr -- :!tests/performance :!docker/test/performance-comparison | tee other-changed-files.txt
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Set python output encoding so that we can print queries with Russian letters.
|
# Set python output encoding so that we can print queries with non-ASCII letters.
|
||||||
export PYTHONIOENCODING=utf-8
|
export PYTHONIOENCODING=utf-8
|
||||||
|
|
||||||
# By default, use the main comparison script from the tested package, so that we
|
# By default, use the main comparison script from the tested package, so that we
|
||||||
@ -151,11 +151,7 @@ export PATH
|
|||||||
export REF_PR
|
export REF_PR
|
||||||
export REF_SHA
|
export REF_SHA
|
||||||
|
|
||||||
# Try to collect some core dumps. I've seen two patterns in Sandbox:
|
# Try to collect some core dumps.
|
||||||
# 1) |/home/zomb-sandbox/venv/bin/python /home/zomb-sandbox/client/sandbox/bin/coredumper.py %e %p %g %u %s %P %c
|
|
||||||
# Not sure what this script does (puts them to sandbox resources, logs some messages?),
|
|
||||||
# and it's not accessible from inside docker anyway.
|
|
||||||
# 2) something like %e.%p.core.dmp. The dump should end up in the workspace directory.
|
|
||||||
# At least we remove the ulimit and then try to pack some common file names into output.
|
# At least we remove the ulimit and then try to pack some common file names into output.
|
||||||
ulimit -c unlimited
|
ulimit -c unlimited
|
||||||
cat /proc/sys/kernel/core_pattern
|
cat /proc/sys/kernel/core_pattern
|
||||||
|
@@ -369,6 +369,7 @@ for query_index in queries_to_run:
                 "max_execution_time": args.prewarm_max_query_seconds,
                 "query_profiler_real_time_period_ns": 10000000,
                 "query_profiler_cpu_time_period_ns": 10000000,
+                "metrics_perf_events_enabled": 1,
                 "memory_profiler_step": "4Mi",
             },
         )
@@ -503,6 +504,7 @@ for query_index in queries_to_run:
             settings={
                 "query_profiler_real_time_period_ns": 10000000,
                 "query_profiler_cpu_time_period_ns": 10000000,
+                "metrics_perf_events_enabled": 1,
             },
         )
         print(
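With metrics_perf_events_enabled now off globally (users.xml change above), perf.py re-enables it only for the runs it actually measures, via per-query settings. A minimal sketch of sending per-query settings with clickhouse-driver (host and query are placeholders, not from the repository):

from clickhouse_driver import Client

client = Client(host="localhost")  # placeholder host
result = client.execute(
    "SELECT count() FROM numbers(1000000)",
    settings={
        # Same shape as the perf.py settings dict above.
        "query_profiler_real_time_period_ns": 10_000_000,
        "query_profiler_cpu_time_period_ns": 10_000_000,
        "metrics_perf_events_enabled": 1,
    },
)
print(result)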
@@ -1,4 +1,5 @@
 #!/bin/bash
+
 set -exu
 trap "exit" INT TERM
 
@@ -96,5 +97,4 @@ rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
 zstd < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.zst &
 
 # Compressed (FIXME: remove once only github actions will be left)
-rm /var/log/clickhouse-server/clickhouse-server.log
 mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
docker/test/sqltest/Dockerfile (new file, 30 lines)
@@ -0,0 +1,30 @@
+# docker build -t clickhouse/sqltest .
+ARG FROM_TAG=latest
+FROM clickhouse/test-base:$FROM_TAG
+
+RUN apt-get update --yes \
+    && env DEBIAN_FRONTEND=noninteractive \
+        apt-get install --yes --no-install-recommends \
+            wget \
+            git \
+            python3 \
+            python3-dev \
+            python3-pip \
+            sudo \
+    && apt-get clean
+
+RUN pip3 install \
+    pyyaml \
+    clickhouse-driver
+
+ARG sqltest_repo="https://github.com/elliotchance/sqltest/"
+
+RUN git clone ${sqltest_repo}
+
+ENV TZ=UTC
+ENV MAX_RUN_TIME=900
+RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+
+COPY run.sh /
+COPY test.py /
+CMD ["/bin/bash", "/run.sh"]
docker/test/sqltest/run.sh (new executable file, 51 lines)
@@ -0,0 +1,51 @@
+#!/bin/bash
+# shellcheck disable=SC2015
+
+set -x
+set -e
+set -u
+set -o pipefail
+
+BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-16_debug_none_unsplitted_disable_False_binary"}
+BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}
+
+function wget_with_retry
+{
+    for _ in 1 2 3 4; do
+        if wget -nv -nd -c "$1";then
+            return 0
+        else
+            sleep 0.5
+        fi
+    done
+    return 1
+}
+
+wget_with_retry "$BINARY_URL_TO_DOWNLOAD"
+chmod +x clickhouse
+./clickhouse install --noninteractive
+
+echo "
+users:
+  default:
+    access_management: 1" > /etc/clickhouse-server/users.d/access_management.yaml
+
+clickhouse start
+
+# Wait for start
+for _ in {1..100}
+do
+    clickhouse-client --query "SELECT 1" && break ||:
+    sleep 1
+done
+
+# Run the test
+pushd sqltest/standards/2016/
+/test.py
+mv report.html test.log /workspace
+popd
+
+zstd --threads=0 /var/log/clickhouse-server/clickhouse-server.log
+zstd --threads=0 /var/log/clickhouse-server/clickhouse-server.err.log
+
+mv /var/log/clickhouse-server/clickhouse-server.log.zst /var/log/clickhouse-server/clickhouse-server.err.log.zst /workspace
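wget_with_retry above retries a flaky download four times with a short pause. The same pattern in Python, for comparison (urllib is standard library; the URL is a placeholder, and unlike wget -c this sketch does not resume partial downloads):

import time
import urllib.request

def download_with_retry(url: str, dest: str, attempts: int = 4) -> bool:
    # Same shape as wget_with_retry: a few attempts, short sleep between.
    for _ in range(attempts):
        try:
            urllib.request.urlretrieve(url, dest)
            return True
        except OSError:
            time.sleep(0.5)
    return False

# Placeholder URL; the real script substitutes PR and commit specifics.
download_with_retry("https://example.com/clickhouse", "clickhouse")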
docker/test/sqltest/test.py (new executable file, 148 lines)
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+
+import os
+import yaml
+import html
+import random
+import string
+from clickhouse_driver import Client
+
+
+client = Client(host="localhost", port=9000)
+settings = {
+    "default_table_engine": "Memory",
+    "union_default_mode": "DISTINCT",
+    "calculate_text_stack_trace": 0,
+}
+
+database_name = "sqltest_" + "".join(
+    random.choice(string.ascii_lowercase) for _ in range(10)
+)
+
+client.execute(f"DROP DATABASE IF EXISTS {database_name}", settings=settings)
+client.execute(f"CREATE DATABASE {database_name}", settings=settings)
+
+client = Client(host="localhost", port=9000, database=database_name)
+
+summary = {"success": 0, "total": 0, "results": {}}
+
+log_file = open("test.log", "w")
+report_html_file = open("report.html", "w")
+
+with open("features.yml", "r") as file:
+    yaml_content = yaml.safe_load(file)
+
+    for category in yaml_content:
+        log_file.write(category.capitalize() + " features:\n")
+        summary["results"][category] = {"success": 0, "total": 0, "results": {}}
+
+        for test in yaml_content[category]:
+            log_file.write(test + ": " + yaml_content[category][test] + "\n")
+            summary["results"][category]["results"][test] = {
+                "success": 0,
+                "total": 0,
+                "description": yaml_content[category][test],
+            }
+
+            test_path = test[0] + "/" + test + ".tests.yml"
+            if os.path.exists(test_path):
+                with open(test_path, "r") as test_file:
+                    test_yaml_content = yaml.load_all(test_file, Loader=yaml.FullLoader)
+
+                    for test_case in test_yaml_content:
+                        queries = test_case["sql"]
+                        if not isinstance(queries, list):
+                            queries = [queries]
+
+                        for query in queries:
+                            # Example: E011-01
+                            test_group = ""
+                            if "-" in test:
+                                test_group = test.split("-", 1)[0]
+                                summary["results"][category]["results"][test_group][
+                                    "total"
+                                ] += 1
+                            summary["results"][category]["results"][test]["total"] += 1
+                            summary["results"][category]["total"] += 1
+                            summary["total"] += 1
+
+                            log_file.write(query + "\n")
+
+                            try:
+                                result = client.execute(query, settings=settings)
+                                log_file.write(str(result) + "\n")
+
+                                if test_group:
+                                    summary["results"][category]["results"][test_group][
+                                        "success"
+                                    ] += 1
+                                summary["results"][category]["results"][test][
+                                    "success"
+                                ] += 1
+                                summary["results"][category]["success"] += 1
+                                summary["success"] += 1
+
+                            except Exception as e:
+                                log_file.write(f"Error occurred: {str(e)}\n")
+
+client.execute(f"DROP DATABASE {database_name}", settings=settings)
+
+
+def enable_color(ratio):
+    if ratio == 0:
+        return "<b style='color: red;'>"
+    elif ratio < 0.5:
+        return "<b style='color: orange;'>"
+    elif ratio < 1:
+        return "<b style='color: gray;'>"
+    else:
+        return "<b style='color: green;'>"
+
+
+reset_color = "</b>"
+
+
+def print_ratio(indent, name, success, total, description):
+    report_html_file.write(
+        "{}{}: {}{} / {} ({:.1%}){}{}\n".format(
+            " " * indent,
+            name.capitalize(),
+            enable_color(success / total),
+            success,
+            total,
+            success / total,
+            reset_color,
+            f" - " + html.escape(description) if description else "",
+        )
+    )
+
+
+report_html_file.write(
+    "<html><body><pre style='font-size: 16pt; padding: 1em; line-height: 1.25;'>\n"
+)
+
+print_ratio(0, "Total", summary["success"], summary["total"], "")
+
+for category in summary["results"]:
+    cat_summary = summary["results"][category]
+
+    if cat_summary["total"] == 0:
+        continue
+
+    print_ratio(2, category, cat_summary["success"], cat_summary["total"], "")
+
+    for test in summary["results"][category]["results"]:
+        test_summary = summary["results"][category]["results"][test]
+
+        if test_summary["total"] == 0:
+            continue
+
+        print_ratio(
+            6 if "-" in test else 4,
+            test,
+            test_summary["success"],
+            test_summary["total"],
+            test_summary["description"],
+        )
+
+report_html_file.write("</pre></body></html>\n")
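For reference, the report line format used by print_ratio above produces output like this (a worked example with made-up counts; a 0.75 ratio falls in enable_color's gray band):

# Made-up counts; mirrors print_ratio's format string in test.py above.
success, total = 3, 4
ratio = success / total  # 0.75 -> gray per enable_color's thresholds
line = "{}: {} / {} ({:.1%})".format("Mandatory", success, total, ratio)
print(line)  # Mandatory: 3 / 4 (75.0%)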
@@ -20,6 +20,22 @@ ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test
 azurite-blob --blobHost 0.0.0.0 --blobPort 10000 --debug /azurite_log &
 ./setup_minio.sh stateful
+
+# Setup a cluster for logs export to ClickHouse Cloud
+# Note: these variables are provided to the Docker run command by the Python script in tests/ci
+if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
+then
+    echo "
+remote_servers:
+    system_logs_export:
+        shard:
+            replica:
+                secure: 1
+                user: ci
+                host: '${CLICKHOUSE_CI_LOGS_HOST}'
+                password: '${CLICKHOUSE_CI_LOGS_PASSWORD}'
+" > /etc/clickhouse-server/config.d/system_logs_export.yaml
+fi
 
 function start()
 {
     if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
@ -65,6 +81,22 @@ function start()
|
|||||||
}
|
}
|
||||||
|
|
||||||
start
|
start
|
||||||
|
|
||||||
|
# Initialize export of system logs to ClickHouse Cloud
|
||||||
|
if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
|
||||||
|
then
|
||||||
|
export EXTRA_COLUMNS_EXPRESSION="$PULL_REQUEST_NUMBER AS pull_request_number, '$COMMIT_SHA' AS commit_sha, '$CHECK_START_TIME' AS check_start_time, '$CHECK_NAME' AS check_name, '$INSTANCE_TYPE' AS instance_type"
|
||||||
|
# TODO: Check if the password will appear in the logs.
|
||||||
|
export CONNECTION_PARAMETERS="--secure --user ci --host ${CLICKHOUSE_CI_LOGS_HOST} --password ${CLICKHOUSE_CI_LOGS_PASSWORD}"
|
||||||
|
|
||||||
|
./setup_export_logs.sh
|
||||||
|
|
||||||
|
# Unset variables after use
|
||||||
|
export CONNECTION_PARAMETERS=''
|
||||||
|
export CLICKHOUSE_CI_LOGS_HOST=''
|
||||||
|
export CLICKHOUSE_CI_LOGS_PASSWORD=''
|
||||||
|
fi
|
||||||
|
|
||||||
# shellcheck disable=SC2086 # No quotes because I want to split it into words.
|
# shellcheck disable=SC2086 # No quotes because I want to split it into words.
|
||||||
/s3downloader --url-prefix "$S3_URL" --dataset-names $DATASETS
|
/s3downloader --url-prefix "$S3_URL" --dataset-names $DATASETS
|
||||||
chmod 777 -R /var/lib/clickhouse
|
chmod 777 -R /var/lib/clickhouse
|
||||||
|
@ -41,6 +41,8 @@ RUN apt-get update -y \
|
|||||||
zstd \
|
zstd \
|
||||||
file \
|
file \
|
||||||
pv \
|
pv \
|
||||||
|
zip \
|
||||||
|
p7zip-full \
|
||||||
&& apt-get clean
|
&& apt-get clean
|
||||||
|
|
||||||
RUN pip3 install numpy scipy pandas Jinja2
|
RUN pip3 install numpy scipy pandas Jinja2
|
||||||
@ -52,7 +54,7 @@ RUN mkdir -p /tmp/clickhouse-odbc-tmp \
|
|||||||
&& odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
|
&& odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
|
||||||
&& rm -rf /tmp/clickhouse-odbc-tmp
|
&& rm -rf /tmp/clickhouse-odbc-tmp
|
||||||
|
|
||||||
ENV TZ=Europe/Moscow
|
ENV TZ=Europe/Amsterdam
|
||||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||||
|
|
||||||
ENV NUM_TRIES=1
|
ENV NUM_TRIES=1
|
||||||
@ -85,4 +87,5 @@ RUN npm install -g azurite \
|
|||||||
COPY run.sh /
|
COPY run.sh /
|
||||||
COPY setup_minio.sh /
|
COPY setup_minio.sh /
|
||||||
COPY setup_hdfs_minicluster.sh /
|
COPY setup_hdfs_minicluster.sh /
|
||||||
|
|
||||||
CMD ["/bin/bash", "/run.sh"]
|
CMD ["/bin/bash", "/run.sh"]
|
||||||
|
@ -36,6 +36,22 @@ fi
|
|||||||
./setup_minio.sh stateless
|
./setup_minio.sh stateless
|
||||||
./setup_hdfs_minicluster.sh
|
./setup_hdfs_minicluster.sh
|
||||||
|
|
||||||
|
# Setup a cluster for logs export to ClickHouse Cloud
|
||||||
|
# Note: these variables are provided to the Docker run command by the Python script in tests/ci
|
||||||
|
if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
|
||||||
|
then
|
||||||
|
echo "
|
||||||
|
remote_servers:
|
||||||
|
system_logs_export:
|
||||||
|
shard:
|
||||||
|
replica:
|
||||||
|
secure: 1
|
||||||
|
user: ci
|
||||||
|
host: '${CLICKHOUSE_CI_LOGS_HOST}'
|
||||||
|
password: '${CLICKHOUSE_CI_LOGS_PASSWORD}'
|
||||||
|
" > /etc/clickhouse-server/config.d/system_logs_export.yaml
|
||||||
|
fi
|
||||||
|
|
||||||
# For flaky check we also enable thread fuzzer
|
# For flaky check we also enable thread fuzzer
|
||||||
if [ "$NUM_TRIES" -gt "1" ]; then
|
if [ "$NUM_TRIES" -gt "1" ]; then
|
||||||
export THREAD_FUZZER_CPU_TIME_PERIOD_US=1000
|
export THREAD_FUZZER_CPU_TIME_PERIOD_US=1000
|
||||||
@ -92,7 +108,28 @@ if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]
|
|||||||
MAX_RUN_TIME=$((MAX_RUN_TIME != 0 ? MAX_RUN_TIME : 9000)) # set to 2.5 hours if 0 (unlimited)
|
MAX_RUN_TIME=$((MAX_RUN_TIME != 0 ? MAX_RUN_TIME : 9000)) # set to 2.5 hours if 0 (unlimited)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
sleep 5
|
|
||||||
|
# Wait for the server to start, but not for too long.
|
||||||
|
for _ in {1..100}
|
||||||
|
do
|
||||||
|
clickhouse-client --query "SELECT 1" && break
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
|
||||||
|
# Initialize export of system logs to ClickHouse Cloud
|
||||||
|
if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
|
||||||
|
then
|
||||||
|
export EXTRA_COLUMNS_EXPRESSION="$PULL_REQUEST_NUMBER AS pull_request_number, '$COMMIT_SHA' AS commit_sha, '$CHECK_START_TIME' AS check_start_time, '$CHECK_NAME' AS check_name, '$INSTANCE_TYPE' AS instance_type"
|
||||||
|
# TODO: Check if the password will appear in the logs.
|
||||||
|
export CONNECTION_PARAMETERS="--secure --user ci --host ${CLICKHOUSE_CI_LOGS_HOST} --password ${CLICKHOUSE_CI_LOGS_PASSWORD}"
|
||||||
|
|
||||||
|
./setup_export_logs.sh
|
||||||
|
|
||||||
|
# Unset variables after use
|
||||||
|
export CONNECTION_PARAMETERS=''
|
||||||
|
export CLICKHOUSE_CI_LOGS_HOST=''
|
||||||
|
export CLICKHOUSE_CI_LOGS_PASSWORD=''
|
||||||
|
fi
|
||||||
|
|
||||||
attach_gdb_to_clickhouse || true # FIXME: to not break old builds, clean on 2023-09-01
|
attach_gdb_to_clickhouse || true # FIXME: to not break old builds, clean on 2023-09-01
|
||||||
|
|
||||||
@@ -51,8 +51,39 @@ configure
 azurite-blob --blobHost 0.0.0.0 --blobPort 10000 --debug /azurite_log &
 ./setup_minio.sh stateless # to have a proper environment
 
+# Setup a cluster for logs export to ClickHouse Cloud
+# Note: these variables are provided to the Docker run command by the Python script in tests/ci
+if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
+then
+    echo "
+remote_servers:
+    system_logs_export:
+        shard:
+            replica:
+                secure: 1
+                user: ci
+                host: '${CLICKHOUSE_CI_LOGS_HOST}'
+                password: '${CLICKHOUSE_CI_LOGS_PASSWORD}'
+" > /etc/clickhouse-server/config.d/system_logs_export.yaml
+fi
+
 start
+
+# Initialize export of system logs to ClickHouse Cloud
+if [ -n "${CLICKHOUSE_CI_LOGS_HOST}" ]
+then
+    export EXTRA_COLUMNS_EXPRESSION="$PULL_REQUEST_NUMBER AS pull_request_number, '$COMMIT_SHA' AS commit_sha, '$CHECK_START_TIME' AS check_start_time, '$CHECK_NAME' AS check_name, '$INSTANCE_TYPE' AS instance_type"
+    # TODO: Check if the password will appear in the logs.
+    export CONNECTION_PARAMETERS="--secure --user ci --host ${CLICKHOUSE_CI_LOGS_HOST} --password ${CLICKHOUSE_CI_LOGS_PASSWORD}"
+
+    ./setup_export_logs.sh
+
+    # Unset variables after use
+    export CONNECTION_PARAMETERS=''
+    export CLICKHOUSE_CI_LOGS_HOST=''
+    export CLICKHOUSE_CI_LOGS_PASSWORD=''
+fi
 
 # shellcheck disable=SC2086 # No quotes because I want to split it into words.
 /s3downloader --url-prefix "$S3_URL" --dataset-names $DATASETS
 chmod 777 -R /var/lib/clickhouse
@ -180,6 +211,11 @@ mv /etc/clickhouse-server/config.d/s3_storage_policy_by_default.xml.tmp /etc/cli
|
|||||||
sudo chown clickhouse /etc/clickhouse-server/config.d/s3_storage_policy_by_default.xml
|
sudo chown clickhouse /etc/clickhouse-server/config.d/s3_storage_policy_by_default.xml
|
||||||
sudo chgrp clickhouse /etc/clickhouse-server/config.d/s3_storage_policy_by_default.xml
|
sudo chgrp clickhouse /etc/clickhouse-server/config.d/s3_storage_policy_by_default.xml
|
||||||
|
|
||||||
|
sudo cat /etc/clickhouse-server/config.d/logger_trace.xml \
|
||||||
|
| sed "s|<level>trace</level>|<level>test</level>|" \
|
||||||
|
> /etc/clickhouse-server/config.d/logger_trace.xml.tmp
|
||||||
|
mv /etc/clickhouse-server/config.d/logger_trace.xml.tmp /etc/clickhouse-server/config.d/logger_trace.xml
|
||||||
|
|
||||||
start
|
start
|
||||||
|
|
||||||
stress --hung-check --drop-databases --output-folder test_output --skip-func-tests "$SKIP_TESTS_OPTION" --global-time-limit 1200 \
|
stress --hung-check --drop-databases --output-folder test_output --skip-func-tests "$SKIP_TESTS_OPTION" --global-time-limit 1200 \
|
||||||
@ -233,4 +269,10 @@ rowNumberInAllBlocks()
|
|||||||
LIMIT 1" < /test_output/test_results.tsv > /test_output/check_status.tsv || echo "failure\tCannot parse test_results.tsv" > /test_output/check_status.tsv
|
LIMIT 1" < /test_output/test_results.tsv > /test_output/check_status.tsv || echo "failure\tCannot parse test_results.tsv" > /test_output/check_status.tsv
|
||||||
[ -s /test_output/check_status.tsv ] || echo -e "success\tNo errors found" > /test_output/check_status.tsv
|
[ -s /test_output/check_status.tsv ] || echo -e "success\tNo errors found" > /test_output/check_status.tsv
|
||||||
|
|
||||||
|
# But OOMs in stress test are allowed
|
||||||
|
if rg 'OOM in dmesg|Signal 9' /test_output/check_status.tsv
|
||||||
|
then
|
||||||
|
sed -i 's/failure/success/' /test_output/check_status.tsv
|
||||||
|
fi
|
||||||
|
|
||||||
collect_core_dumps
|
collect_core_dumps
|
||||||
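The OOM allowance above rewrites the status file in place with sed. The equivalent check in Python, for clarity (path as in the script; illustrative only):

import pathlib
import re

status = pathlib.Path("/test_output/check_status.tsv")
text = status.read_text()
# Stress-test OOMs are tolerated: flip the verdict if the kernel killed us.
if re.search(r"OOM in dmesg|Signal 9", text):
    status.write_text(text.replace("failure", "success"))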
@@ -1,5 +1,5 @@
 # docker build -t clickhouse/style-test .
-FROM ubuntu:20.04
+FROM ubuntu:22.04
 ARG ACT_VERSION=0.2.33
 ARG ACTIONLINT_VERSION=1.6.22
 
@@ -63,6 +63,7 @@ configure
 # it contains some new settings, but we can safely remove it
 rm /etc/clickhouse-server/config.d/merge_tree.xml
 rm /etc/clickhouse-server/config.d/enable_wait_for_shutdown_replicated_tables.xml
+rm /etc/clickhouse-server/config.d/filesystem_caches_path.xml
 rm /etc/clickhouse-server/users.d/nonconst_timezone.xml
 
 start
@@ -93,6 +94,7 @@ sudo chgrp clickhouse /etc/clickhouse-server/config.d/s3_storage_policy_by_defau
 # it contains some new settings, but we can safely remove it
 rm /etc/clickhouse-server/config.d/merge_tree.xml
 rm /etc/clickhouse-server/config.d/enable_wait_for_shutdown_replicated_tables.xml
+rm /etc/clickhouse-server/config.d/filesystem_caches_path.xml
 rm /etc/clickhouse-server/users.d/nonconst_timezone.xml
 
 start
@ -231,4 +233,10 @@ rowNumberInAllBlocks()
|
|||||||
LIMIT 1" < /test_output/test_results.tsv > /test_output/check_status.tsv || echo "failure\tCannot parse test_results.tsv" > /test_output/check_status.tsv
|
LIMIT 1" < /test_output/test_results.tsv > /test_output/check_status.tsv || echo "failure\tCannot parse test_results.tsv" > /test_output/check_status.tsv
|
||||||
[ -s /test_output/check_status.tsv ] || echo -e "success\tNo errors found" > /test_output/check_status.tsv
|
[ -s /test_output/check_status.tsv ] || echo -e "success\tNo errors found" > /test_output/check_status.tsv
|
||||||
|
|
||||||
|
# But OOMs in stress test are allowed
|
||||||
|
if rg 'OOM in dmesg|Signal 9' /test_output/check_status.tsv
|
||||||
|
then
|
||||||
|
sed -i 's/failure/success/' /test_output/check_status.tsv
|
||||||
|
fi
|
||||||
|
|
||||||
collect_core_dumps
|
collect_core_dumps
|
||||||
(Some files were not shown because too many files have changed in this diff.)