Merge branch 'master' into ftsearch

larryluogit 2023-01-19 11:34:11 -05:00 committed by GitHub
commit 52ae33dba7
2233 changed files with 34542 additions and 13551 deletions


@ -18,5 +18,24 @@ tests/ci/run_check.py
### Changelog entry (a user-readable short description of the changes that goes to CHANGELOG.md):
...
### Documentation entry for user-facing changes
- [ ] Documentation is written (mandatory for new features)
<!---
Directly edit documentation source files in the "docs" folder with the same pull-request as code changes
or
Add a user-readable short description of the changes that should be added to docs.clickhouse.com below.
At a minimum, the following information should be added (but add more as needed).
- Motivation: Why is this function, table engine, etc. useful to ClickHouse users?
- Parameters: If the feature being added takes arguments, options or is influenced by settings, please list them below with a brief explanation.
- Example use: A query or command.
-->
> Information about CI checks: https://clickhouse.com/docs/en/development/continuous-integration/


@ -683,3 +683,4 @@ jobs:
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
python3 merge_pr.py


@ -8,4 +8,4 @@ jobs:
DebugInfo:
runs-on: ubuntu-latest
steps:
- uses: hmarr/debug-action@1201a20fc9d278ddddd5f0f46922d06513892491
- uses: hmarr/debug-action@a701ed95a46e6f2fb0df25e1a558c16356fae35a


@ -169,3 +169,4 @@ jobs:
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
python3 merge_pr.py --check-approved


@ -141,37 +141,6 @@ jobs:
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
SharedBuildSmokeTest:
needs: [BuilderDebShared]
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/split_build_check
REPO_COPY=${{runner.temp}}/split_build_check/ClickHouse
REPORTS_PATH=${{runner.temp}}/reports_dir
EOF
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Shared build check
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 split_build_smoke_check.py
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
@ -508,47 +477,6 @@ jobs:
##########################################################################################
##################################### SPECIAL BUILDS #####################################
##########################################################################################
BuilderDebShared:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=binary_shared
EOF
- name: Download changed images
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
BuilderBinClangTidy:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
@ -968,7 +896,6 @@ jobs:
- BuilderBinAmd64Compat
- BuilderBinAarch64V80Compat
- BuilderBinClangTidy
- BuilderDebShared
runs-on: [self-hosted, style-checker]
if: ${{ success() || failure() }}
steps:
@ -3139,7 +3066,6 @@ jobs:
- UnitTestsMsan
- UnitTestsUBsan
- UnitTestsReleaseClang
- SharedBuildSmokeTest
- SQLancerTestRelease
- SQLancerTestDebug
runs-on: [self-hosted, style-checker]


@ -107,7 +107,7 @@ jobs:
run: |
curl --form token="${COVERITY_TOKEN}" \
--form email='security+coverity@clickhouse.com' \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tgz" \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tar.zst" \
--form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
--form description="Nightly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse


@ -203,37 +203,6 @@ jobs:
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
SharedBuildSmokeTest:
needs: [BuilderDebShared]
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/split_build_check
REPO_COPY=${{runner.temp}}/split_build_check/ClickHouse
REPORTS_PATH=${{runner.temp}}/reports_dir
EOF
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Shared build check
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 split_build_smoke_check.py
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
@ -570,47 +539,6 @@ jobs:
##########################################################################################
##################################### SPECIAL BUILDS #####################################
##########################################################################################
BuilderDebShared:
needs: [DockerHubPush, FastTest, StyleCheck]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=binary_shared
EOF
- name: Download changed images
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
BuilderBinClangTidy:
needs: [DockerHubPush, FastTest, StyleCheck]
runs-on: [self-hosted, builder]
@ -1018,12 +946,10 @@ jobs:
- BuilderBinDarwin
- BuilderBinDarwinAarch64
- BuilderBinFreeBSD
# - BuilderBinGCC
- BuilderBinPPC64
- BuilderBinAmd64Compat
- BuilderBinAarch64V80Compat
- BuilderBinClangTidy
- BuilderDebShared
runs-on: [self-hosted, style-checker]
if: ${{ success() || failure() }}
steps:
@ -2603,7 +2529,7 @@ jobs:
sudo rm -fr "$TEMP_PATH"
TestsBugfixCheck:
needs: [CheckLabels, StyleCheck]
runs-on: [self-hosted, stress-tester]
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
run: |
@ -2639,7 +2565,7 @@ jobs:
python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \
--validate-bugfix --post-commit-status=file || echo 'ignore exit code'
python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/post_commit_status.tsv" "${TEMP_PATH}/integration/post_commit_status.tsv"
python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/functional_commit_status.tsv" "${TEMP_PATH}/integration/integration_commit_status.tsv"
- name: Cleanup
if: always()
run: |
@ -4448,7 +4374,6 @@ jobs:
- UnitTestsMsan
- UnitTestsUBsan
- UnitTestsReleaseClang
- SharedBuildSmokeTest
- CompatibilityCheck
- IntegrationTestsFlakyCheck
- SQLancerTestRelease
@ -4463,3 +4388,4 @@ jobs:
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
python3 merge_pr.py --check-approved

.gitignore vendored

@ -154,6 +154,8 @@ website/package-lock.json
/programs/server/data
/programs/server/metadata
/programs/server/store
/programs/server/uuid
/programs/server/coordination
# temporary test files
tests/queries/0_stateless/test_*

.gitmodules vendored

@ -1,88 +1,88 @@
[submodule "contrib/poco"]
path = contrib/poco
url = https://github.com/ClickHouse/poco.git
url = https://github.com/ClickHouse/poco
branch = clickhouse
[submodule "contrib/zstd"]
path = contrib/zstd
url = https://github.com/facebook/zstd.git
url = https://github.com/facebook/zstd
[submodule "contrib/lz4"]
path = contrib/lz4
url = https://github.com/lz4/lz4.git
url = https://github.com/lz4/lz4
[submodule "contrib/librdkafka"]
path = contrib/librdkafka
url = https://github.com/ClickHouse/librdkafka.git
url = https://github.com/ClickHouse/librdkafka
[submodule "contrib/cctz"]
path = contrib/cctz
url = https://github.com/ClickHouse/cctz.git
url = https://github.com/ClickHouse/cctz
[submodule "contrib/zlib-ng"]
path = contrib/zlib-ng
url = https://github.com/ClickHouse/zlib-ng.git
url = https://github.com/ClickHouse/zlib-ng
branch = clickhouse-2.0.x
[submodule "contrib/googletest"]
path = contrib/googletest
url = https://github.com/google/googletest.git
url = https://github.com/google/googletest
[submodule "contrib/capnproto"]
path = contrib/capnproto
url = https://github.com/capnproto/capnproto.git
url = https://github.com/capnproto/capnproto
[submodule "contrib/double-conversion"]
path = contrib/double-conversion
url = https://github.com/google/double-conversion.git
url = https://github.com/google/double-conversion
[submodule "contrib/re2"]
path = contrib/re2
url = https://github.com/google/re2.git
url = https://github.com/google/re2
[submodule "contrib/mariadb-connector-c"]
path = contrib/mariadb-connector-c
url = https://github.com/ClickHouse/mariadb-connector-c.git
url = https://github.com/ClickHouse/mariadb-connector-c
[submodule "contrib/jemalloc"]
path = contrib/jemalloc
url = https://github.com/jemalloc/jemalloc.git
url = https://github.com/jemalloc/jemalloc
[submodule "contrib/unixodbc"]
path = contrib/unixodbc
url = https://github.com/ClickHouse/UnixODBC.git
url = https://github.com/ClickHouse/UnixODBC
[submodule "contrib/protobuf"]
path = contrib/protobuf
url = https://github.com/ClickHouse/protobuf.git
url = https://github.com/ClickHouse/protobuf
branch = v3.13.0.1
[submodule "contrib/boost"]
path = contrib/boost
url = https://github.com/ClickHouse/boost.git
url = https://github.com/ClickHouse/boost
[submodule "contrib/base64"]
path = contrib/base64
url = https://github.com/ClickHouse/Turbo-Base64.git
url = https://github.com/ClickHouse/Turbo-Base64
[submodule "contrib/arrow"]
path = contrib/arrow
url = https://github.com/ClickHouse/arrow.git
url = https://github.com/ClickHouse/arrow
branch = blessed/release-6.0.1
[submodule "contrib/thrift"]
path = contrib/thrift
url = https://github.com/apache/thrift.git
url = https://github.com/apache/thrift
[submodule "contrib/libhdfs3"]
path = contrib/libhdfs3
url = https://github.com/ClickHouse/libhdfs3.git
url = https://github.com/ClickHouse/libhdfs3
[submodule "contrib/libxml2"]
path = contrib/libxml2
url = https://github.com/GNOME/libxml2.git
url = https://github.com/GNOME/libxml2
[submodule "contrib/libgsasl"]
path = contrib/libgsasl
url = https://github.com/ClickHouse/libgsasl.git
url = https://github.com/ClickHouse/libgsasl
[submodule "contrib/snappy"]
path = contrib/snappy
url = https://github.com/ClickHouse/snappy.git
url = https://github.com/ClickHouse/snappy
[submodule "contrib/cppkafka"]
path = contrib/cppkafka
url = https://github.com/mfontanini/cppkafka.git
url = https://github.com/mfontanini/cppkafka
[submodule "contrib/brotli"]
path = contrib/brotli
url = https://github.com/google/brotli.git
url = https://github.com/google/brotli
[submodule "contrib/h3"]
path = contrib/h3
url = https://github.com/ClickHouse/h3
[submodule "contrib/libunwind"]
path = contrib/libunwind
url = https://github.com/ClickHouse/libunwind.git
url = https://github.com/ClickHouse/libunwind
[submodule "contrib/simdjson"]
path = contrib/simdjson
url = https://github.com/simdjson/simdjson.git
url = https://github.com/simdjson/simdjson
[submodule "contrib/rapidjson"]
path = contrib/rapidjson
url = https://github.com/ClickHouse/rapidjson
@ -94,68 +94,68 @@
url = https://github.com/ClickHouse/orc
[submodule "contrib/sparsehash-c11"]
path = contrib/sparsehash-c11
url = https://github.com/sparsehash/sparsehash-c11.git
url = https://github.com/sparsehash/sparsehash-c11
[submodule "contrib/grpc"]
path = contrib/grpc
url = https://github.com/ClickHouse/grpc.git
url = https://github.com/ClickHouse/grpc
branch = v1.33.2
[submodule "contrib/aws"]
path = contrib/aws
url = https://github.com/ClickHouse/aws-sdk-cpp.git
url = https://github.com/ClickHouse/aws-sdk-cpp
[submodule "aws-c-event-stream"]
path = contrib/aws-c-event-stream
url = https://github.com/ClickHouse/aws-c-event-stream.git
url = https://github.com/awslabs/aws-c-event-stream
[submodule "aws-c-common"]
path = contrib/aws-c-common
url = https://github.com/ClickHouse/aws-c-common.git
url = https://github.com/ClickHouse/aws-c-common
[submodule "aws-checksums"]
path = contrib/aws-checksums
url = https://github.com/ClickHouse/aws-checksums.git
url = https://github.com/awslabs/aws-checksums
[submodule "contrib/curl"]
path = contrib/curl
url = https://github.com/curl/curl.git
url = https://github.com/curl/curl
[submodule "contrib/icudata"]
path = contrib/icudata
url = https://github.com/ClickHouse/icudata.git
url = https://github.com/ClickHouse/icudata
[submodule "contrib/icu"]
path = contrib/icu
url = https://github.com/unicode-org/icu.git
url = https://github.com/unicode-org/icu
[submodule "contrib/flatbuffers"]
path = contrib/flatbuffers
url = https://github.com/ClickHouse/flatbuffers.git
url = https://github.com/ClickHouse/flatbuffers
[submodule "contrib/replxx"]
path = contrib/replxx
url = https://github.com/ClickHouse/replxx.git
url = https://github.com/ClickHouse/replxx
[submodule "contrib/avro"]
path = contrib/avro
url = https://github.com/ClickHouse/avro.git
url = https://github.com/ClickHouse/avro
ignore = untracked
[submodule "contrib/msgpack-c"]
path = contrib/msgpack-c
url = https://github.com/msgpack/msgpack-c
[submodule "contrib/libcpuid"]
path = contrib/libcpuid
url = https://github.com/ClickHouse/libcpuid.git
url = https://github.com/ClickHouse/libcpuid
[submodule "contrib/openldap"]
path = contrib/openldap
url = https://github.com/ClickHouse/openldap.git
url = https://github.com/ClickHouse/openldap
[submodule "contrib/AMQP-CPP"]
path = contrib/AMQP-CPP
url = https://github.com/ClickHouse/AMQP-CPP.git
url = https://github.com/ClickHouse/AMQP-CPP
[submodule "contrib/cassandra"]
path = contrib/cassandra
url = https://github.com/ClickHouse/cpp-driver.git
url = https://github.com/ClickHouse/cpp-driver
branch = clickhouse
[submodule "contrib/libuv"]
path = contrib/libuv
url = https://github.com/ClickHouse/libuv.git
url = https://github.com/ClickHouse/libuv
branch = clickhouse
[submodule "contrib/fmtlib"]
path = contrib/fmtlib
url = https://github.com/fmtlib/fmt.git
url = https://github.com/fmtlib/fmt
[submodule "contrib/sentry-native"]
path = contrib/sentry-native
url = https://github.com/ClickHouse/sentry-native.git
url = https://github.com/ClickHouse/sentry-native
[submodule "contrib/krb5"]
path = contrib/krb5
url = https://github.com/ClickHouse/krb5
@ -172,17 +172,17 @@
url = https://github.com/danlark1/miniselect
[submodule "contrib/rocksdb"]
path = contrib/rocksdb
url = https://github.com/ClickHouse/rocksdb.git
url = https://github.com/ClickHouse/rocksdb
[submodule "contrib/xz"]
path = contrib/xz
url = https://github.com/xz-mirror/xz
[submodule "contrib/abseil-cpp"]
path = contrib/abseil-cpp
url = https://github.com/abseil/abseil-cpp.git
url = https://github.com/abseil/abseil-cpp
branch = lts_2021_11_02
[submodule "contrib/dragonbox"]
path = contrib/dragonbox
url = https://github.com/ClickHouse/dragonbox.git
url = https://github.com/ClickHouse/dragonbox
[submodule "contrib/fast_float"]
path = contrib/fast_float
url = https://github.com/fastfloat/fast_float
@ -191,44 +191,44 @@
url = https://github.com/ClickHouse/libpq
[submodule "contrib/boringssl"]
path = contrib/boringssl
url = https://github.com/ClickHouse/boringssl.git
url = https://github.com/ClickHouse/boringssl
branch = unknown_branch_from_artur
[submodule "contrib/NuRaft"]
path = contrib/NuRaft
url = https://github.com/ClickHouse/NuRaft.git
url = https://github.com/ClickHouse/NuRaft
[submodule "contrib/nanodbc"]
path = contrib/nanodbc
url = https://github.com/ClickHouse/nanodbc.git
url = https://github.com/ClickHouse/nanodbc
[submodule "contrib/datasketches-cpp"]
path = contrib/datasketches-cpp
url = https://github.com/ClickHouse/datasketches-cpp.git
url = https://github.com/ClickHouse/datasketches-cpp
[submodule "contrib/yaml-cpp"]
path = contrib/yaml-cpp
url = https://github.com/ClickHouse/yaml-cpp.git
url = https://github.com/ClickHouse/yaml-cpp
[submodule "contrib/cld2"]
path = contrib/cld2
url = https://github.com/ClickHouse/cld2.git
url = https://github.com/ClickHouse/cld2
[submodule "contrib/libstemmer_c"]
path = contrib/libstemmer_c
url = https://github.com/ClickHouse/libstemmer_c.git
url = https://github.com/ClickHouse/libstemmer_c
[submodule "contrib/wordnet-blast"]
path = contrib/wordnet-blast
url = https://github.com/ClickHouse/wordnet-blast.git
url = https://github.com/ClickHouse/wordnet-blast
[submodule "contrib/lemmagen-c"]
path = contrib/lemmagen-c
url = https://github.com/ClickHouse/lemmagen-c.git
url = https://github.com/ClickHouse/lemmagen-c
[submodule "contrib/libpqxx"]
path = contrib/libpqxx
url = https://github.com/ClickHouse/libpqxx.git
url = https://github.com/ClickHouse/libpqxx
[submodule "contrib/sqlite-amalgamation"]
path = contrib/sqlite-amalgamation
url = https://github.com/azadkuh/sqlite-amalgamation
url = https://github.com/ClickHouse/sqlite-amalgamation
[submodule "contrib/s2geometry"]
path = contrib/s2geometry
url = https://github.com/ClickHouse/s2geometry.git
url = https://github.com/ClickHouse/s2geometry
[submodule "contrib/bzip2"]
path = contrib/bzip2
url = https://github.com/ClickHouse/bzip2.git
url = https://github.com/ClickHouse/bzip2
[submodule "contrib/magic_enum"]
path = contrib/magic_enum
url = https://github.com/Neargye/magic_enum
@ -237,60 +237,96 @@
url = https://github.com/google/libprotobuf-mutator
[submodule "contrib/sysroot"]
path = contrib/sysroot
url = https://github.com/ClickHouse/sysroot.git
url = https://github.com/ClickHouse/sysroot
[submodule "contrib/nlp-data"]
path = contrib/nlp-data
url = https://github.com/ClickHouse/nlp-data.git
url = https://github.com/ClickHouse/nlp-data
[submodule "contrib/hive-metastore"]
path = contrib/hive-metastore
url = https://github.com/ClickHouse/hive-metastore
[submodule "contrib/azure"]
path = contrib/azure
url = https://github.com/ClickHouse/azure-sdk-for-cpp.git
url = https://github.com/ClickHouse/azure-sdk-for-cpp
[submodule "contrib/minizip-ng"]
path = contrib/minizip-ng
url = https://github.com/zlib-ng/minizip-ng
[submodule "contrib/annoy"]
path = contrib/annoy
url = https://github.com/ClickHouse/annoy.git
url = https://github.com/ClickHouse/annoy
branch = ClickHouse-master
[submodule "contrib/qpl"]
path = contrib/qpl
url = https://github.com/intel/qpl.git
url = https://github.com/intel/qpl
[submodule "contrib/wyhash"]
path = contrib/wyhash
url = https://github.com/wangyi-fudan/wyhash.git
url = https://github.com/wangyi-fudan/wyhash
[submodule "contrib/hashidsxx"]
path = contrib/hashidsxx
url = https://github.com/schoentoon/hashidsxx.git
url = https://github.com/schoentoon/hashidsxx
[submodule "contrib/nats-io"]
path = contrib/nats-io
url = https://github.com/ClickHouse/nats.c.git
url = https://github.com/ClickHouse/nats.c
[submodule "contrib/vectorscan"]
path = contrib/vectorscan
url = https://github.com/VectorCamp/vectorscan.git
url = https://github.com/VectorCamp/vectorscan
[submodule "contrib/c-ares"]
path = contrib/c-ares
url = https://github.com/ClickHouse/c-ares
[submodule "contrib/llvm-project"]
path = contrib/llvm-project
url = https://github.com/ClickHouse/llvm-project.git
url = https://github.com/ClickHouse/llvm-project
[submodule "contrib/corrosion"]
path = contrib/corrosion
url = https://github.com/corrosion-rs/corrosion.git
url = https://github.com/corrosion-rs/corrosion
[submodule "contrib/morton-nd"]
path = contrib/morton-nd
url = https://github.com/morton-nd/morton-nd
[submodule "contrib/xxHash"]
path = contrib/xxHash
url = https://github.com/Cyan4973/xxHash.git
url = https://github.com/Cyan4973/xxHash
[submodule "contrib/crc32-s390x"]
path = contrib/crc32-s390x
url = https://github.com/linux-on-ibm-z/crc32-s390x
[submodule "contrib/openssl"]
path = contrib/openssl
url = https://github.com/openssl/openssl
branch = openssl-3.0
[submodule "contrib/google-benchmark"]
path = contrib/google-benchmark
url = https://github.com/google/benchmark.git
url = https://github.com/google/benchmark
[submodule "contrib/libdivide"]
path = contrib/libdivide
url = https://github.com/ridiculousfish/libdivide.git
url = https://github.com/ridiculousfish/libdivide
[submodule "contrib/aws-crt-cpp"]
path = contrib/aws-crt-cpp
url = https://github.com/ClickHouse/aws-crt-cpp
[submodule "contrib/aws-c-io"]
path = contrib/aws-c-io
url = https://github.com/ClickHouse/aws-c-io
[submodule "contrib/aws-c-mqtt"]
path = contrib/aws-c-mqtt
url = https://github.com/awslabs/aws-c-mqtt
[submodule "contrib/aws-c-auth"]
path = contrib/aws-c-auth
url = https://github.com/awslabs/aws-c-auth
[submodule "contrib/aws-c-cal"]
path = contrib/aws-c-cal
url = https://github.com/ClickHouse/aws-c-cal
[submodule "contrib/aws-c-sdkutils"]
path = contrib/aws-c-sdkutils
url = https://github.com/awslabs/aws-c-sdkutils
[submodule "contrib/aws-c-http"]
path = contrib/aws-c-http
url = https://github.com/awslabs/aws-c-http
[submodule "contrib/aws-c-s3"]
path = contrib/aws-c-s3
url = https://github.com/awslabs/aws-c-s3
[submodule "contrib/aws-c-compression"]
path = contrib/aws-c-compression
url = https://github.com/awslabs/aws-c-compression
[submodule "contrib/aws-s2n-tls"]
path = contrib/aws-s2n-tls
url = https://github.com/ClickHouse/s2n-tls
[submodule "contrib/crc32-vpmsum"]
path = contrib/crc32-vpmsum
url = https://github.com/antonblanchard/crc32-vpmsum.git


@ -73,22 +73,7 @@ message (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
string (TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UC)
option(USE_STATIC_LIBRARIES "Disable to use shared libraries" ON)
# DEVELOPER ONLY.
# Faster linking if turned on.
option(SPLIT_SHARED_LIBRARIES "Keep all internal libraries as separate .so files" OFF)
if (USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
message(FATAL_ERROR "SPLIT_SHARED_LIBRARIES=1 must not be used together with USE_STATIC_LIBRARIES=1")
endif()
if (NOT USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
set(BUILD_SHARED_LIBS 1 CACHE INTERNAL "")
endif ()
if (USE_STATIC_LIBRARIES)
list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)
endif ()
list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)
option (ENABLE_FUZZING "Fuzzy testing using libfuzzer" OFF)
@ -171,7 +156,7 @@ option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests"
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
option(ENABLE_BENCHMARKS "Build all benchmark programs in 'benchmarks' subdirectories" OFF)
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND USE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT USE_MUSL)
# Only for Linux, x86_64 or aarch64.
option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
elseif(GLIBC_COMPATIBILITY)
@ -467,22 +452,13 @@ endif ()
set (CMAKE_POSTFIX_VARIABLE "CMAKE_${CMAKE_BUILD_TYPE_UC}_POSTFIX")
if (USE_STATIC_LIBRARIES)
set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
if (OS_LINUX AND NOT ARCH_AARCH64)
# Slightly more efficient code can be generated
# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
endif ()
else ()
set (CMAKE_POSITION_INDEPENDENT_CODE ON)
# This is required for clang on Arch linux, that uses PIE by default.
# See enable-SSP-and-PIE-by-default.patch [1].
#
# [1]: https://github.com/archlinux/svntogit-packages/blob/6e681aa860e65ad46a1387081482eb875c2200f2/trunk/enable-SSP-and-PIE-by-default.patch
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie")
set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
if (OS_LINUX AND NOT ARCH_AARCH64)
# Slightly more efficient code can be generated
# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
endif ()
if (ENABLE_TESTS)
@ -504,10 +480,7 @@ else ()
set (CLICKHOUSE_ETC_DIR "${CMAKE_INSTALL_PREFIX}/etc")
endif ()
message (STATUS
"Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ;
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
SPLIT_SHARED_LIBRARIES=${SPLIT_SHARED_LIBRARIES}")
message (STATUS "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE}")
include (GNUInstallDirs)
@ -553,7 +526,7 @@ macro (clickhouse_add_executable target)
# - _je_zone_register due to JEMALLOC_PRIVATE_NAMESPACE=je_ under OS X.
# - but jemalloc-cmake does not run private_namespace.sh
# so symbol name should be _zone_register
if (ENABLE_JEMALLOC AND USE_STATIC_LIBRARIES AND OS_DARWIN)
if (ENABLE_JEMALLOC AND OS_DARWIN)
set_property(TARGET ${target} APPEND PROPERTY LINK_OPTIONS -u_zone_register)
endif()
endif()


@ -1,4 +1,4 @@
Copyright 2016-2022 ClickHouse, Inc.
Copyright 2016-2023 ClickHouse, Inc.
Apache License
Version 2.0, January 2004
@ -188,7 +188,7 @@ Copyright 2016-2022 ClickHouse, Inc.
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2016-2022 ClickHouse, Inc.
Copyright 2016-2023 ClickHouse, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.


@ -39,10 +39,6 @@ endif ()
target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")
if (OS_DARWIN AND NOT USE_STATIC_LIBRARIES)
target_link_libraries(common PUBLIC -Wl,-U,_inside_main)
endif()
target_link_libraries (common
PUBLIC
ch_contrib::cityhash

base/base/IPv4andIPv6.h Normal file

@ -0,0 +1,53 @@
#pragma once
#include <base/strong_typedef.h>
#include <base/extended_types.h>
#include <Common/memcmpSmall.h>
namespace DB
{
using IPv4 = StrongTypedef<UInt32, struct IPv4Tag>;
struct IPv6 : StrongTypedef<UInt128, struct IPv6Tag>
{
constexpr IPv6() = default;
constexpr explicit IPv6(const UInt128 & x) : StrongTypedef(x) {}
constexpr explicit IPv6(UInt128 && x) : StrongTypedef(std::move(x)) {}
IPv6 & operator=(const UInt128 & rhs) { StrongTypedef::operator=(rhs); return *this; }
IPv6 & operator=(UInt128 && rhs) { StrongTypedef::operator=(std::move(rhs)); return *this; }
bool operator<(const IPv6 & rhs) const
{
return
memcmp16(
reinterpret_cast<const unsigned char *>(toUnderType().items),
reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
) < 0;
}
bool operator>(const IPv6 & rhs) const
{
return
memcmp16(
reinterpret_cast<const unsigned char *>(toUnderType().items),
reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
) > 0;
}
bool operator==(const IPv6 & rhs) const
{
return
memcmp16(
reinterpret_cast<const unsigned char *>(toUnderType().items),
reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
) == 0;
}
bool operator<=(const IPv6 & rhs) const { return !operator>(rhs); }
bool operator>=(const IPv6 & rhs) const { return !operator<(rhs); }
bool operator!=(const IPv6 & rhs) const { return !operator==(rhs); }
};
}
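For context, a self-contained sketch (not ClickHouse code; IPv6Like and plain std::memcmp stand in for the StrongTypedef and the vectorized memcmp16 above) of the comparison pattern this header uses, ordering the 128-bit address as a raw 16-byte blob:

#include <cassert>
#include <cstdint>
#include <cstring>

struct IPv6Like
{
    uint8_t bytes[16]{};

    /// memcmp16 above is a fixed-size, vectorized variant of this call.
    bool operator<(const IPv6Like & rhs) const { return std::memcmp(bytes, rhs.bytes, 16) < 0; }
    bool operator==(const IPv6Like & rhs) const { return std::memcmp(bytes, rhs.bytes, 16) == 0; }
};

int main()
{
    IPv6Like zero, one;
    one.bytes[15] = 1; /// "::1" compares greater than "::"
    assert(zero < one && !(zero == one));
    return 0;
}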


@ -2,6 +2,7 @@
#include "Decimal.h"
#include "UUID.h"
#include "IPv4andIPv6.h"
namespace DB
{
@ -35,6 +36,8 @@ TN_MAP(Float32)
TN_MAP(Float64)
TN_MAP(String)
TN_MAP(UUID)
TN_MAP(IPv4)
TN_MAP(IPv6)
TN_MAP(Decimal32)
TN_MAP(Decimal64)
TN_MAP(Decimal128)


@ -144,6 +144,13 @@
# define TSA_REQUIRES_SHARED(...) __attribute__((requires_shared_capability(__VA_ARGS__))) /// thread needs shared possession of given capability
# define TSA_ACQUIRED_AFTER(...) __attribute__((acquired_after(__VA_ARGS__))) /// annotated lock must be locked after given lock
# define TSA_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis)) /// disable TSA for a function
# define TSA_CAPABILITY(...) __attribute__((capability(__VA_ARGS__))) /// object of a class can be used as capability
# define TSA_ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__))) /// function acquires a capability, but does not release it
# define TSA_TRY_ACQUIRE(...) __attribute__((try_acquire_capability(__VA_ARGS__))) /// function tries to acquire a capability and returns a boolean value indicating success or failure
# define TSA_RELEASE(...) __attribute__((release_capability(__VA_ARGS__))) /// function releases the given capability
# define TSA_ACQUIRE_SHARED(...) __attribute__((acquire_shared_capability(__VA_ARGS__))) /// function acquires a shared capability, but does not release it
# define TSA_TRY_ACQUIRE_SHARED(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__))) /// function tries to acquire a shared capability and returns a boolean value indicating success or failure
# define TSA_RELEASE_SHARED(...) __attribute__((release_shared_capability(__VA_ARGS__))) /// function releases the given shared capability
/// Macros for suppressing TSA warnings for specific reads/writes (instead of suppressing it for the whole function)
/// They use a lambda function to apply function attribute to a single statement. This enable us to suppress warnings locally instead of
@ -164,6 +171,13 @@
# define TSA_REQUIRES(...)
# define TSA_REQUIRES_SHARED(...)
# define TSA_NO_THREAD_SAFETY_ANALYSIS
# define TSA_CAPABILITY(...)
# define TSA_ACQUIRE(...)
# define TSA_TRY_ACQUIRE(...)
# define TSA_RELEASE(...)
# define TSA_ACQUIRE_SHARED(...)
# define TSA_TRY_ACQUIRE_SHARED(...)
# define TSA_RELEASE_SHARED(...)
# define TSA_SUPPRESS_WARNING_FOR_READ(x) (x)
# define TSA_SUPPRESS_WARNING_FOR_WRITE(x) (x)
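For context, a minimal sketch (not from this commit; SpinLock and incrementUnderLock are hypothetical names) of how the new capability macros are applied, assuming clang with -Wthread-safety:

#include <atomic>

class TSA_CAPABILITY("SpinLock") SpinLock
{
public:
    void lock() TSA_ACQUIRE() { while (flag.test_and_set(std::memory_order_acquire)) {} }
    void unlock() TSA_RELEASE() { flag.clear(std::memory_order_release); }
private:
    std::atomic_flag flag = ATOMIC_FLAG_INIT;
};

/// The analysis rejects callers that do not already hold `lock`.
void incrementUnderLock(SpinLock & lock, int & value) TSA_REQUIRES(lock)
{
    ++value;
}

The empty fallbacks above let the same annotated code compile unchanged on compilers without these attributes.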


@ -37,7 +37,7 @@ if (GLIBC_COMPATIBILITY)
target_include_directories(glibc-compatibility PRIVATE libcxxabi ${musl_arch_include_dir})
if (( NOT USE_STATIC_LIBRARIES AND NOT USE_STATIC_LIBRARIES ) OR ENABLE_OPENSSL_DYNAMIC)
if (ENABLE_OPENSSL_DYNAMIC)
target_compile_options(glibc-compatibility PRIVATE -fPIC)
endif ()


@ -102,6 +102,11 @@ elseif (ARCH_AMD64)
SET(ENABLE_AVX512_FOR_SPEC_OP 0)
endif()
# ClickHouse can be cross-compiled (e.g. on an ARM host for x86) but it is also possible to build ClickHouse on x86 w/o AVX for x86 w/
# AVX. We only check that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary.
# Therefore, use check_cxx_source_compiles (= does the code compile+link?) instead of check_cxx_source_runs (= does the code
# compile+link+run).
set (TEST_FLAG "-mssse3")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("


@ -25,7 +25,7 @@ if (SANITIZE)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${ASAN_FLAGS}")
endif()
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan")
endif ()
if (COMPILER_GCC)
@ -50,7 +50,7 @@ if (SANITIZE)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=memory")
endif()
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libmsan")
endif ()
@ -71,7 +71,7 @@ if (SANITIZE)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=thread")
endif()
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libtsan")
endif ()
if (COMPILER_GCC)
@ -103,7 +103,7 @@ if (SANITIZE)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined")
endif()
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libubsan")
endif ()
if (COMPILER_GCC)


@ -55,6 +55,7 @@ else ()
endif ()
add_contrib (miniselect-cmake miniselect)
add_contrib (pdqsort-cmake pdqsort)
add_contrib (crc32-vpmsum-cmake crc32-vpmsum)
add_contrib (sparsehash-c11-cmake sparsehash-c11)
add_contrib (abseil-cpp-cmake abseil-cpp)
add_contrib (magic-enum-cmake magic_enum)
@ -115,12 +116,25 @@ endif()
add_contrib (llvm-project-cmake llvm-project)
add_contrib (libfuzzer-cmake llvm-project)
add_contrib (libxml2-cmake libxml2)
add_contrib (aws-s3-cmake
add_contrib (aws-cmake
aws
aws-c-auth
aws-c-cal
aws-c-common
aws-c-compression
aws-c-event-stream
aws-c-http
aws-c-io
aws-c-mqtt
aws-c-s3
aws-c-sdkutils
aws-s2n-tls
aws-checksums
aws-crt-cpp
aws-cmake
)
add_contrib (base64-cmake base64)
add_contrib (simdjson-cmake simdjson)
add_contrib (rapidjson-cmake rapidjson)
@ -166,6 +180,10 @@ add_contrib (c-ares-cmake c-ares)
add_contrib (qpl-cmake qpl)
add_contrib (morton-nd-cmake morton-nd)
if (ARCH_S390X)
add_contrib(crc32-s390x-cmake crc32-s390x)
endif()
add_contrib (annoy-cmake annoy)
add_contrib (xxHash-cmake xxHash)


@ -78,23 +78,14 @@ set(FLATBUFFERS_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/flatbuffers")
set(FLATBUFFERS_INCLUDE_DIR "${FLATBUFFERS_SRC_DIR}/include")
# set flatbuffers CMake options
if (USE_STATIC_LIBRARIES)
set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library")
set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library")
else ()
set(FLATBUFFERS_BUILD_SHAREDLIB ON CACHE BOOL "Enable the build of the flatbuffers shared library")
set(FLATBUFFERS_BUILD_FLATLIB OFF CACHE BOOL "Disable the build of the flatbuffers library")
endif ()
set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library")
set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library")
set(FLATBUFFERS_BUILD_TESTS OFF CACHE BOOL "Skip flatbuffers tests")
add_subdirectory(${FLATBUFFERS_SRC_DIR} "${FLATBUFFERS_BINARY_DIR}")
add_library(_flatbuffers INTERFACE)
if(USE_STATIC_LIBRARIES)
target_link_libraries(_flatbuffers INTERFACE flatbuffers)
else()
target_link_libraries(_flatbuffers INTERFACE flatbuffers_shared)
endif()
target_link_libraries(_flatbuffers INTERFACE flatbuffers)
target_include_directories(_flatbuffers INTERFACE ${FLATBUFFERS_INCLUDE_DIR})
# === hdfs

contrib/aws vendored

@ -1 +1 @@
Subproject commit 00b03604543367d7e310cb0993973fdcb723ea79
Subproject commit 4a12641211d4dbc8e2fdb2dd0f1eea0927db9252

contrib/aws-c-auth vendored Submodule

@ -0,0 +1 @@
Subproject commit 30df6c407e2df43bd244e2c34c9b4a4b87372bfb

contrib/aws-c-cal vendored Submodule

@ -0,0 +1 @@
Subproject commit 85dd7664b786a389c6fb1a6f031ab4bb2282133d

contrib/aws-c-common vendored
@ -1 +1 @@
Subproject commit 736a82d1697c108b04a277e66438a7f4e19b6857
Subproject commit 324fd1d973ccb25c813aa747bf1759cfde5121c5

contrib/aws-c-compression vendored Submodule

@ -0,0 +1 @@
Subproject commit b517b7decd0dac30be2162f5186c250221c53aff

contrib/aws-c-event-stream vendored
@ -1 +1 @@
Subproject commit 3bc33662f9ccff4f4cbcf9509cc78c26e022fde0
Subproject commit 39bfa94a14b7126bf0c1330286ef8db452d87e66

contrib/aws-c-http vendored Submodule

@ -0,0 +1 @@
Subproject commit 2c5a2a7d5556600b9782ffa6c9d7e09964df1abc

contrib/aws-c-io vendored Submodule

@ -0,0 +1 @@
Subproject commit 5d32c453560d0823df521a686bf7fbacde7f9be3

contrib/aws-c-mqtt vendored Submodule

@ -0,0 +1 @@
Subproject commit 882c689561a3db1466330ccfe3b63637e0a575d3

contrib/aws-c-s3 vendored Submodule

@ -0,0 +1 @@
Subproject commit a41255ece72a7c887bba7f9d998ca3e14f4c8a1b

contrib/aws-c-sdkutils vendored Submodule

@ -0,0 +1 @@
Subproject commit 25bf5cf225f977c3accc6a05a0a7a181ef2a4a30

contrib/aws-checksums vendored
@ -1 +1 @@
Subproject commit 519d6d9093819b6cf89ffff589a27ef8f83d0f65
Subproject commit 48e7c0e01479232f225c8044d76c84e74192889d


@ -0,0 +1,114 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckCSourceRuns)
option(USE_CPU_EXTENSIONS "Whenever possible, use functions optimized for CPUs with specific extensions (ex: SSE, AVX)." ON)
# In the current (11/2/21) state of mingw64, the packaged gcc is not capable of emitting properly aligned avx2 instructions under certain circumstances.
# This leads to crashes for windows builds using mingw64 when invoking the avx2-enabled versions of certain functions. Until we can find a better
# work-around, disable avx2 (and all other extensions) in mingw builds.
#
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412
#
if (MINGW)
message(STATUS "MINGW detected! Disabling avx2 and other CPU extensions")
set(USE_CPU_EXTENSIONS OFF)
endif()
if(NOT CMAKE_CROSSCOMPILING)
check_c_source_runs("
#include <stdbool.h>
bool foo(int a, int b, int *c) {
return __builtin_mul_overflow(a, b, c);
}
int main() {
int out;
if (foo(1, 2, &out)) {
return 0;
}
return 0;
}" AWS_HAVE_GCC_OVERFLOW_MATH_EXTENSIONS)
if (USE_CPU_EXTENSIONS)
check_c_source_runs("
int main() {
int foo = 42;
_mulx_u32(1, 2, &foo);
return foo != 2;
}" AWS_HAVE_MSVC_MULX)
endif()
endif()
check_c_source_compiles("
#include <Windows.h>
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
int main() {
return 0;
}
#else
it's not windows desktop
#endif
" AWS_HAVE_WINAPI_DESKTOP)
check_c_source_compiles("
int main() {
#if !(defined(__x86_64__) || defined(__i386__) || defined(_M_X64) || defined(_M_IX86))
# error \"not intel\"
#endif
return 0;
}
" AWS_ARCH_INTEL)
check_c_source_compiles("
int main() {
#if !(defined(__aarch64__) || defined(_M_ARM64))
# error \"not arm64\"
#endif
return 0;
}
" AWS_ARCH_ARM64)
check_c_source_compiles("
int main() {
#if !(defined(__arm__) || defined(_M_ARM))
# error \"not arm\"
#endif
return 0;
}
" AWS_ARCH_ARM32)
check_c_source_compiles("
int main() {
int foo = 42, bar = 24;
__asm__ __volatile__(\"\":\"=r\"(foo):\"r\"(bar):\"memory\");
}" AWS_HAVE_GCC_INLINE_ASM)
check_c_source_compiles("
#include <sys/auxv.h>
int main() {
#ifdef __linux__
getauxval(AT_HWCAP);
getauxval(AT_HWCAP2);
#endif
return 0;
}" AWS_HAVE_AUXV)
string(REGEX MATCH "^(aarch64|arm)" ARM_CPU "${CMAKE_SYSTEM_PROCESSOR}")
if(NOT LEGACY_COMPILER_SUPPORT OR ARM_CPU)
check_c_source_compiles("
#include <execinfo.h>
int main() {
backtrace(NULL, 0);
return 0;
}" AWS_HAVE_EXECINFO)
endif()
check_c_source_compiles("
#include <linux/if_link.h>
int main() {
return 1;
}" AWS_HAVE_LINUX_IF_LINK_H)


@ -0,0 +1,74 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckCCompilerFlag)
include(CheckIncludeFile)
if (USE_CPU_EXTENSIONS)
if (MSVC)
check_c_compiler_flag("/arch:AVX2" HAVE_M_AVX2_FLAG)
if (HAVE_M_AVX2_FLAG)
set(AVX2_CFLAGS "/arch:AVX2")
endif()
else()
check_c_compiler_flag(-mavx2 HAVE_M_AVX2_FLAG)
if (HAVE_M_AVX2_FLAG)
set(AVX2_CFLAGS "-mavx -mavx2")
endif()
endif()
cmake_push_check_state()
set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${AVX2_CFLAGS}")
check_c_source_compiles("
#include <immintrin.h>
#include <emmintrin.h>
#include <string.h>
int main() {
__m256i vec;
memset(&vec, 0, sizeof(vec));
_mm256_shuffle_epi8(vec, vec);
_mm256_set_epi32(1,2,3,4,5,6,7,8);
_mm256_permutevar8x32_epi32(vec, vec);
return 0;
}" HAVE_AVX2_INTRINSICS)
check_c_source_compiles("
#include <immintrin.h>
#include <string.h>
int main() {
__m256i vec;
memset(&vec, 0, sizeof(vec));
return (int)_mm256_extract_epi64(vec, 2);
}" HAVE_MM256_EXTRACT_EPI64)
cmake_pop_check_state()
endif() # USE_CPU_EXTENSIONS
macro(simd_add_definition_if target definition)
if(${definition})
target_compile_definitions(${target} PRIVATE -D${definition})
endif(${definition})
endmacro(simd_add_definition_if)
# Configure private preprocessor definitions for SIMD-related features
# Does not set any processor feature codegen flags
function(simd_add_definitions target)
simd_add_definition_if(${target} HAVE_AVX2_INTRINSICS)
simd_add_definition_if(${target} HAVE_MM256_EXTRACT_EPI64)
endfunction(simd_add_definitions)
# Adds source files only if AVX2 is supported. These files will be built with
# avx2 intrinsics enabled.
# Usage: simd_add_source_avx2(target file1.c file2.c ...)
function(simd_add_source_avx2 target)
foreach(file ${ARGN})
target_sources(${target} PRIVATE ${file})
set_source_files_properties(${file} PROPERTIES COMPILE_FLAGS "${AVX2_CFLAGS}")
endforeach()
endfunction(simd_add_source_avx2)


@ -0,0 +1,50 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckSymbolExists)
# Check if the platform supports setting thread affinity
# (important for hitting full NIC entitlement on NUMA architectures)
function(aws_set_thread_affinity_method target)
# Non-POSIX, Android, and Apple platforms do not support thread affinity.
if (NOT UNIX OR ANDROID OR APPLE)
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
return()
endif()
cmake_push_check_state()
list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
set(headers "pthread.h")
# BSDs put nonportable pthread declarations in a separate header.
if(CMAKE_SYSTEM_NAME MATCHES BSD)
set(headers "${headers};pthread_np.h")
endif()
# Using pthread attrs is the preferred method, but is glibc-specific.
check_symbol_exists(pthread_attr_setaffinity_np "${headers}" USE_PTHREAD_ATTR_SETAFFINITY)
if (USE_PTHREAD_ATTR_SETAFFINITY)
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD_ATTR)
return()
endif()
# This method is still nonportable, but is supported by musl and BSDs.
check_symbol_exists(pthread_setaffinity_np "${headers}" USE_PTHREAD_SETAFFINITY)
if (USE_PTHREAD_SETAFFINITY)
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD)
return()
endif()
# If we got here, we expected thread affinity support but didn't find it.
# We still build with degraded NUMA performance, but show a warning.
message(WARNING "No supported method for setting thread affinity")
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
cmake_pop_check_state()
endfunction()
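For context, a minimal sketch (glibc-specific; not part of the AWS sources) of the preferred pthread-attrs path this function probes for:

#include <pthread.h>
#include <sched.h>

static void * worker(void *) { return nullptr; }

int main()
{
    cpu_set_t cpus;
    CPU_ZERO(&cpus);
    CPU_SET(0, &cpus); // pin the new thread to CPU 0

    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setaffinity_np(&attr, sizeof(cpus), &cpus); // the glibc extension detected by check_symbol_exists above

    pthread_t t;
    pthread_create(&t, &attr, worker, nullptr);
    pthread_join(t, nullptr);
    pthread_attr_destroy(&attr);
    return 0;
}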


@ -0,0 +1,61 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckSymbolExists)
# Check how the platform supports setting thread name
function(aws_set_thread_name_method target)
if (WINDOWS)
# On Windows we do a runtime check, instead of compile-time check
return()
elseif (APPLE)
# All Apple platforms we support have the same function, so no need for compile-time check.
return()
endif()
cmake_push_check_state()
list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
# The start of the test program
set(c_source_start "
#define _GNU_SOURCE
#include <pthread.h>
#if defined(__FreeBSD__) || defined(__NETBSD__)
#include <pthread_np.h>
#endif
int main() {
pthread_t thread_id;
")
# The end of the test program
set(c_source_end "}")
# pthread_setname_np() usually takes 2 args
check_c_source_compiles("
${c_source_start}
pthread_setname_np(thread_id, \"asdf\");
${c_source_end}"
PTHREAD_SETNAME_TAKES_2ARGS)
if (PTHREAD_SETNAME_TAKES_2ARGS)
target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS)
return()
endif()
# But on NetBSD it takes 3!
check_c_source_compiles("
${c_source_start}
pthread_setname_np(thread_id, \"asdf\", NULL);
${c_source_end}
" PTHREAD_SETNAME_TAKES_3ARGS)
if (PTHREAD_SETNAME_TAKES_3ARGS)
target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_3ARGS)
return()
endif()
# And on many older/weirder platforms it's just not supported
cmake_pop_check_state()
endfunction()


@ -0,0 +1,376 @@
set(ENABLE_AWS_S3_DEFAULT OFF)
if(ENABLE_LIBRARIES AND (OS_LINUX OR OS_DARWIN) AND TARGET OpenSSL::Crypto)
set(ENABLE_AWS_S3_DEFAULT ON)
endif()
option(ENABLE_AWS_S3 "Enable AWS S3" ${ENABLE_AWS_S3_DEFAULT})
if(ENABLE_AWS_S3)
if(NOT TARGET OpenSSL::Crypto)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use AWS SDK without OpenSSL")
elseif(NOT (OS_LINUX OR OS_DARWIN))
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use AWS SDK with platform ${CMAKE_SYSTEM_NAME}")
endif()
endif()
if(NOT ENABLE_AWS_S3)
message(STATUS "Not using AWS S3")
return()
endif()
# Utilities.
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsFeatureTests.cmake")
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadAffinity.cmake")
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadName.cmake")
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsSIMD.cmake")
# Gather sources and options.
set(AWS_SOURCES)
set(AWS_PUBLIC_INCLUDES)
set(AWS_PRIVATE_INCLUDES)
set(AWS_PUBLIC_COMPILE_DEFS)
set(AWS_PRIVATE_COMPILE_DEFS)
set(AWS_PRIVATE_LIBS)
if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DDEBUG_BUILD")
endif()
set(ENABLE_OPENSSL_ENCRYPTION ON)
if (ENABLE_OPENSSL_ENCRYPTION)
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DENABLE_OPENSSL_ENCRYPTION")
endif()
set(USE_S2N ON)
if (USE_S2N)
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DUSE_S2N")
endif()
# Directories.
SET(AWS_SDK_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws")
SET(AWS_SDK_CORE_DIR "${AWS_SDK_DIR}/aws-cpp-sdk-core")
SET(AWS_SDK_S3_DIR "${AWS_SDK_DIR}/aws-cpp-sdk-s3")
SET(AWS_AUTH_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-auth")
SET(AWS_CAL_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-cal")
SET(AWS_CHECKSUMS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-checksums")
SET(AWS_COMMON_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-common")
SET(AWS_COMPRESSION_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-compression")
SET(AWS_CRT_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-crt-cpp")
SET(AWS_EVENT_STREAM_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-event-stream")
SET(AWS_HTTP_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-http")
SET(AWS_IO_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-io")
SET(AWS_MQTT_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-mqtt")
SET(AWS_S2N_TLS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-s2n-tls")
SET(AWS_S3_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-s3")
SET(AWS_SDKUTILS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-sdkutils")
# aws-cpp-sdk-core
file(GLOB AWS_SDK_CORE_SRC
"${AWS_SDK_CORE_DIR}/source/*.cpp"
"${AWS_SDK_CORE_DIR}/source/auth/*.cpp"
"${AWS_SDK_CORE_DIR}/source/auth/bearer-token-provider/*.cpp"
"${AWS_SDK_CORE_DIR}/source/auth/signer/*.cpp"
"${AWS_SDK_CORE_DIR}/source/auth/signer-provider/*.cpp"
"${AWS_SDK_CORE_DIR}/source/client/*.cpp"
"${AWS_SDK_CORE_DIR}/source/config/*.cpp"
"${AWS_SDK_CORE_DIR}/source/config/defaults/*.cpp"
"${AWS_SDK_CORE_DIR}/source/endpoint/*.cpp"
"${AWS_SDK_CORE_DIR}/source/endpoint/internal/*.cpp"
"${AWS_SDK_CORE_DIR}/source/external/cjson/*.cpp"
"${AWS_SDK_CORE_DIR}/source/external/tinyxml2/*.cpp"
"${AWS_SDK_CORE_DIR}/source/http/*.cpp"
"${AWS_SDK_CORE_DIR}/source/http/standard/*.cpp"
"${AWS_SDK_CORE_DIR}/source/internal/*.cpp"
"${AWS_SDK_CORE_DIR}/source/monitoring/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/base64/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/crypto/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/crypto/openssl/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/crypto/factory/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/event/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/json/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/logging/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/memory/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/memory/stl/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/stream/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/threading/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/xml/*.cpp"
)
if(OS_LINUX OR OS_DARWIN)
file(GLOB AWS_SDK_CORE_NET_SRC "${AWS_SDK_CORE_DIR}/source/net/linux-shared/*.cpp")
file(GLOB AWS_SDK_CORE_PLATFORM_SRC "${AWS_SDK_CORE_DIR}/source/platform/linux-shared/*.cpp")
else()
file(GLOB AWS_SDK_CORE_NET_SRC "${AWS_SDK_CORE_DIR}/source/net/*.cpp")
set(AWS_SDK_CORE_PLATFORM_SRC)
endif()
OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
configure_file("${AWS_SDK_CORE_DIR}/include/aws/core/SDKConfig.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MAJOR=1")
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MINOR=10")
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_PATCH=36")
list(APPEND AWS_SOURCES ${AWS_SDK_CORE_SRC} ${AWS_SDK_CORE_NET_SRC} ${AWS_SDK_CORE_PLATFORM_SRC})
list(APPEND AWS_PUBLIC_INCLUDES
"${AWS_SDK_CORE_DIR}/include/"
"${CMAKE_CURRENT_BINARY_DIR}/include"
)
# aws-cpp-sdk-s3
file(GLOB AWS_SDK_S3_SRC
"${AWS_SDK_S3_DIR}/source/*.cpp"
"${AWS_SDK_S3_DIR}/source/model/*.cpp"
)
list(APPEND AWS_SOURCES ${AWS_SDK_S3_SRC})
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_SDK_S3_DIR}/include/")
# aws-c-auth
file(GLOB AWS_AUTH_SRC
"${AWS_AUTH_DIR}/source/*.c"
)
list(APPEND AWS_SOURCES ${AWS_AUTH_SRC})
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_AUTH_DIR}/include/")
# aws-c-cal
file(GLOB AWS_CAL_SRC
"${AWS_CAL_DIR}/source/*.c"
)
if (ENABLE_OPENSSL_ENCRYPTION)
file(GLOB AWS_CAL_OS_SRC
"${AWS_CAL_DIR}/source/unix/*.c"
)
list(APPEND AWS_PRIVATE_LIBS OpenSSL::Crypto)
endif()
list(APPEND AWS_SOURCES ${AWS_CAL_SRC} ${AWS_CAL_OS_SRC})
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_CAL_DIR}/include/")
# aws-c-event-stream
file(GLOB AWS_EVENT_STREAM_SRC
"${AWS_EVENT_STREAM_DIR}/source/*.c"
)
list(APPEND AWS_SOURCES ${AWS_EVENT_STREAM_SRC})
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_EVENT_STREAM_DIR}/include/")
# aws-c-common
file(GLOB AWS_COMMON_SRC
"${AWS_COMMON_DIR}/source/*.c"
"${AWS_COMMON_DIR}/source/external/*.c"
"${AWS_COMMON_DIR}/source/posix/*.c"
)
file(GLOB AWS_COMMON_ARCH_SRC
"${AWS_COMMON_DIR}/source/arch/generic/*.c"
)
if (AWS_ARCH_INTEL)
file(GLOB AWS_COMMON_ARCH_SRC
"${AWS_COMMON_DIR}/source/arch/intel/cpuid.c"
"${AWS_COMMON_DIR}/source/arch/intel/asm/*.c"
)
elseif (AWS_ARCH_ARM64 OR AWS_ARCH_ARM32)
if (AWS_HAVE_AUXV)
file(GLOB AWS_COMMON_ARCH_SRC
"${AWS_COMMON_DIR}/source/arch/arm/asm/*.c"
)
endif()
endif()
set(AWS_COMMON_AVX2_SRC)
if (HAVE_AVX2_INTRINSICS)
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DUSE_SIMD_ENCODING")
set(AWS_COMMON_AVX2_SRC "${AWS_COMMON_DIR}/source/arch/intel/encoding_avx2.c")
set_source_files_properties(${AWS_COMMON_AVX2_SRC} PROPERTIES COMPILE_FLAGS "${AVX2_CFLAGS}")
endif()
configure_file("${AWS_COMMON_DIR}/include/aws/common/config.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/common/config.h" @ONLY)
list(APPEND AWS_SOURCES ${AWS_COMMON_SRC} ${AWS_COMMON_ARCH_SRC} ${AWS_COMMON_AVX2_SRC})
list(APPEND AWS_PUBLIC_INCLUDES
"${AWS_COMMON_DIR}/include/"
"${CMAKE_CURRENT_BINARY_DIR}/include"
)
# aws-checksums
file(GLOB AWS_CHECKSUMS_SRC
"${AWS_CHECKSUMS_DIR}/source/*.c"
"${AWS_CHECKSUMS_DIR}/source/intel/*.c"
"${AWS_CHECKSUMS_DIR}/source/intel/asm/*.c"
"${AWS_CHECKSUMS_DIR}/source/arm/*.c"
)
if(AWS_ARCH_INTEL AND AWS_HAVE_GCC_INLINE_ASM)
file(GLOB AWS_CHECKSUMS_ARCH_SRC
"${AWS_CHECKSUMS_DIR}/source/intel/asm/*.c"
)
endif()
if (AWS_ARCH_ARM64)
file(GLOB AWS_CHECKSUMS_ARCH_SRC
"${AWS_CHECKSUMS_DIR}/source/arm/*.c"
)
set_source_files_properties("${AWS_CHECKSUMS_DIR}/source/arm/crc32c_arm.c" PROPERTIES COMPILE_FLAGS -march=armv8-a+crc)
elseif (AWS_ARCH_ARM32)
if (AWS_ARM32_CRC)
file(GLOB AWS_CHECKSUMS_ARCH_SRC
"${AWS_CHECKSUMS_DIR}/source/arm/*.c"
"${AWS_CHECKSUMS_DIR}/source/arm/asm/*.c"
)
set_source_files_properties(source/arm/crc32c_arm.c PROPERTIES COMPILE_FLAGS -march=armv8-a+crc)
endif()
endif()
list(APPEND AWS_SOURCES ${AWS_CHECKSUMS_SRC} ${AWS_CHECKSUMS_ARCH_SRC})
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_CHECKSUMS_DIR}/include/")
# aws-c-io
file(GLOB AWS_IO_SRC
"${AWS_IO_DIR}/source/*.c"
)
if (OS_LINUX)
file(GLOB AWS_IO_OS_SRC
"${AWS_IO_DIR}/source/linux/*.c"
"${AWS_IO_DIR}/source/posix/*.c"
)
elseif (OS_DARWIN)
file(GLOB AWS_IO_OS_SRC
"${AWS_IO_DIR}/source/bsd/*.c"
"${AWS_IO_DIR}/source/posix/*.c"
)
endif()
set(AWS_IO_TLS_SRC)
if (USE_S2N)
file(GLOB AWS_IO_TLS_SRC
"${AWS_IO_DIR}/source/s2n/*.c"
)
endif()
list(APPEND AWS_SOURCES ${AWS_IO_SRC} ${AWS_IO_OS_SRC} ${AWS_IO_TLS_SRC})
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_IO_DIR}/include/")
# aws-s2n-tls
if (USE_S2N)
file(GLOB AWS_S2N_TLS_SRC
"${AWS_S2N_TLS_DIR}/crypto/*.c"
"${AWS_S2N_TLS_DIR}/error/*.c"
"${AWS_S2N_TLS_DIR}/stuffer/*.c"
"${AWS_S2N_TLS_DIR}/pq-crypto/*.c"
"${AWS_S2N_TLS_DIR}/pq-crypto/kyber_r3/*.c"
"${AWS_S2N_TLS_DIR}/tls/*.c"
"${AWS_S2N_TLS_DIR}/tls/extensions/*.c"
"${AWS_S2N_TLS_DIR}/utils/*.c"
)
list(APPEND AWS_SOURCES ${AWS_S2N_TLS_SRC})
list(APPEND AWS_PRIVATE_INCLUDES
"${AWS_S2N_TLS_DIR}/"
"${AWS_S2N_TLS_DIR}/api/"
)
endif()
# aws-crt-cpp
file(GLOB AWS_CRT_SRC
"${AWS_CRT_DIR}/source/*.cpp"
"${AWS_CRT_DIR}/source/auth/*.cpp"
"${AWS_CRT_DIR}/source/crypto/*.cpp"
"${AWS_CRT_DIR}/source/endpoints/*.cpp"
"${AWS_CRT_DIR}/source/external/*.cpp"
"${AWS_CRT_DIR}/source/http/*.cpp"
"${AWS_CRT_DIR}/source/io/*.cpp"
)
list(APPEND AWS_SOURCES ${AWS_CRT_SRC})
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_CRT_DIR}/include/")
# aws-c-mqtt
file(GLOB AWS_MQTT_SRC
"${AWS_MQTT_DIR}/source/*.c"
)
list(APPEND AWS_SOURCES ${AWS_MQTT_SRC})
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_MQTT_DIR}/include/")
# aws-c-http
file(GLOB AWS_HTTP_SRC
"${AWS_HTTP_DIR}/source/*.c"
)
list(APPEND AWS_SOURCES ${AWS_HTTP_SRC})
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_HTTP_DIR}/include/")
# aws-c-compression
file(GLOB AWS_COMPRESSION_SRC
"${AWS_COMPRESSION_DIR}/source/*.c"
)
list(APPEND AWS_SOURCES ${AWS_COMPRESSION_SRC})
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_COMPRESSION_DIR}/include/")
# aws-c-s3
file(GLOB AWS_S3_SRC
"${AWS_S3_DIR}/source/*.c"
)
list(APPEND AWS_SOURCES ${AWS_S3_SRC})
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_S3_DIR}/include/")
# aws-c-sdkutils
file(GLOB AWS_SDKUTILS_SRC
"${AWS_SDKUTILS_DIR}/source/*.c"
)
list(APPEND AWS_SOURCES ${AWS_SDKUTILS_SRC})
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_SDKUTILS_DIR}/include/")
# Add library.
add_library(_aws ${AWS_SOURCES})
target_include_directories(_aws SYSTEM BEFORE PUBLIC ${AWS_PUBLIC_INCLUDES})
target_include_directories(_aws SYSTEM BEFORE PRIVATE ${AWS_PRIVATE_INCLUDES})
target_compile_definitions(_aws PUBLIC ${AWS_PUBLIC_COMPILE_DEFS})
target_compile_definitions(_aws PRIVATE ${AWS_PRIVATE_COMPILE_DEFS})
target_link_libraries(_aws PRIVATE ${AWS_PRIVATE_LIBS})
aws_set_thread_affinity_method(_aws)
aws_set_thread_name_method(_aws)
# The library is large - avoid bloat.
if (OMIT_HEAVY_DEBUG_SYMBOLS)
target_compile_options (_aws PRIVATE -g0)
endif()
add_library(ch_contrib::aws_s3 ALIAS _aws)

contrib/aws-crt-cpp vendored Submodule

@ -0,0 +1 @@
Subproject commit ec0bea288f451d884c0d80d534bc5c66241c39a4

contrib/aws-s2n-tls vendored Submodule

@ -0,0 +1 @@
Subproject commit 0f1ba9e5c4a67cb3898de0c0b4f911d4194dc8de


@ -1,122 +0,0 @@
if(NOT OS_FREEBSD)
option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES})
elseif(ENABLE_S3)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use S3 on FreeBSD")
endif()
if(NOT ENABLE_S3)
message(STATUS "Not using S3")
return()
endif()
SET(AWS_S3_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
SET(AWS_CORE_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core")
SET(AWS_CHECKSUMS_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-checksums")
SET(AWS_COMMON_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-common")
SET(AWS_EVENT_STREAM_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-event-stream")
OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
configure_file("${AWS_CORE_LIBRARY_DIR}/include/aws/core/SDKConfig.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
configure_file("${AWS_COMMON_LIBRARY_DIR}/include/aws/common/config.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/common/config.h" @ONLY)
file(GLOB AWS_CORE_SOURCES
"${AWS_CORE_LIBRARY_DIR}/source/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/auth/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/client/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/http/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/http/standard/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/config/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/external/cjson/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/external/tinyxml2/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/internal/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/monitoring/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/net/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/linux-shared/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/platform/linux-shared/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/base64/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/event/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/openssl/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/factory/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/json/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/logging/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/memory/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/memory/stl/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/stream/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/threading/*.cpp"
"${AWS_CORE_LIBRARY_DIR}/source/utils/xml/*.cpp"
)
file(GLOB AWS_S3_SOURCES
"${AWS_S3_LIBRARY_DIR}/source/*.cpp"
)
file(GLOB AWS_S3_MODEL_SOURCES
"${AWS_S3_LIBRARY_DIR}/source/model/*.cpp"
)
file(GLOB AWS_EVENT_STREAM_SOURCES
"${AWS_EVENT_STREAM_LIBRARY_DIR}/source/*.c"
)
file(GLOB AWS_COMMON_SOURCES
"${AWS_COMMON_LIBRARY_DIR}/source/*.c"
"${AWS_COMMON_LIBRARY_DIR}/source/posix/*.c"
)
file(GLOB AWS_CHECKSUMS_SOURCES
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/*.c"
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/intel/*.c"
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/arm/*.c"
)
file(GLOB S3_UNIFIED_SRC
${AWS_EVENT_STREAM_SOURCES}
${AWS_COMMON_SOURCES}
${AWS_S3_SOURCES}
${AWS_S3_MODEL_SOURCES}
${AWS_CORE_SOURCES}
)
set(S3_INCLUDES
"${AWS_COMMON_LIBRARY_DIR}/include/"
"${AWS_EVENT_STREAM_LIBRARY_DIR}/include/"
"${AWS_S3_LIBRARY_DIR}/include/"
"${AWS_CORE_LIBRARY_DIR}/include/"
"${CMAKE_CURRENT_BINARY_DIR}/include/"
)
add_library(_aws_s3_checksums ${AWS_CHECKSUMS_SOURCES})
target_include_directories(_aws_s3_checksums SYSTEM PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/")
if(CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
target_compile_definitions(_aws_s3_checksums PRIVATE "-DDEBUG_BUILD")
endif()
set_target_properties(_aws_s3_checksums PROPERTIES LINKER_LANGUAGE C)
set_property(TARGET _aws_s3_checksums PROPERTY C_STANDARD 99)
add_library(_aws_s3 ${S3_UNIFIED_SRC})
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1")
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7")
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231")
target_include_directories(_aws_s3 SYSTEM BEFORE PUBLIC ${S3_INCLUDES})
if (TARGET OpenSSL::SSL)
target_compile_definitions(_aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION)
target_link_libraries(_aws_s3 PRIVATE OpenSSL::Crypto OpenSSL::SSL)
endif()
target_link_libraries(_aws_s3 PRIVATE _aws_s3_checksums)
# The library is large - avoid bloat.
if (OMIT_HEAVY_DEBUG_SYMBOLS)
target_compile_options (_aws_s3 PRIVATE -g0)
target_compile_options (_aws_s3_checksums PRIVATE -g0)
endif()
add_library(ch_contrib::aws_s3 ALIAS _aws_s3)

contrib/azure vendored

@ -1 +1 @@
Subproject commit ef75afc075fc71fbcd8fe28dcda3794ae265fd1c
Subproject commit ea8c3044f43f5afa7016d2d580ed201f495d7e94


@ -139,13 +139,6 @@ if(NOT OPENSSL_NO_ASM)
endif()
endif()
if(BUILD_SHARED_LIBS)
add_definitions(-DBORINGSSL_SHARED_LIBRARY)
# Enable position-independent code globally. This is needed because
# some library targets are OBJECT libraries.
set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
endif()
set(
CRYPTO_ios_aarch64_SOURCES


@ -63,13 +63,8 @@ SET(SRCS
"${LIBRARY_DIR}/src/lib/windows_port.c"
)
if (USE_STATIC_LIBRARIES)
add_library(_c-ares STATIC ${SRCS})
target_compile_definitions(_c-ares PUBLIC CARES_STATICLIB)
else()
add_library(_c-ares SHARED ${SRCS})
target_compile_definitions(_c-ares PUBLIC CARES_BUILDING_LIBRARY)
endif()
add_library(_c-ares STATIC ${SRCS})
target_compile_definitions(_c-ares PUBLIC CARES_STATICLIB)
target_compile_definitions(_c-ares PRIVATE HAVE_CONFIG_H=1)

contrib/crc32-s390x vendored Submodule

@ -0,0 +1 @@
Subproject commit 30980583bf9ed3fa193abb83a1849705ff457f70


@ -0,0 +1,27 @@
if(ARCH_S390X)
option (ENABLE_CRC32_S390X "Enable crc32 on s390x platform" ON)
endif()
if (NOT ENABLE_CRC32_S390X)
return()
endif()
set(CRC32_S390X_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/crc32-s390x)
set(CRC32_S390X_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/crc32-s390x)
set(CRC32_SRCS
"${CRC32_S390X_SOURCE_DIR}/crc32-s390x.c"
"${CRC32_S390X_SOURCE_DIR}/crc32be-vx.S"
"${CRC32_S390X_SOURCE_DIR}/crc32le-vx.S"
)
set(CRC32_HDRS
"${CRC32_S390X_INCLUDE_DIR}/crc32-s390x.h"
)
add_library(_crc32_s390x ${CRC32_SRCS} ${CRC32_HDRS})
target_include_directories(_crc32_s390x SYSTEM PUBLIC "${CRC32_S390X_INCLUDE_DIR}")
target_compile_definitions(_crc32_s390x PUBLIC)
add_library(ch_contrib::crc32_s390x ALIAS _crc32_s390x)

contrib/crc32-vpmsum vendored Submodule

@ -0,0 +1 @@
Subproject commit 452155439389311fc7d143621eaf56a258e02476


@ -0,0 +1,14 @@
# The crc32-vpmsum module gets built along with the files vec_crc32.h and crc32_constants.h in crc32-vpmsum-cmake
# Please see README.md for information about how to generate crc32_constants.h
if (NOT ARCH_PPC64LE)
message (STATUS "crc32-vpmsum library is only supported on ppc64le")
return()
endif()
SET(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/crc32-vpmsum")
add_library(_crc32-vpmsum
"${LIBRARY_DIR}/vec_crc32.c"
)
target_include_directories(_crc32-vpmsum SYSTEM BEFORE PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}")
add_library(ch_contrib::crc32-vpmsum ALIAS _crc32-vpmsum)


@ -0,0 +1,9 @@
# To generate crc32_constants.h
- Run the Makefile in the `../crc32-vpmsum` directory with the following options and CRC polynomial. The options must use the same polynomial and bit order as the Intel intrinsic functions:
```bash
make crc32_constants.h CRC="0x11EDC6F41" OPTIONS="-x -r -c"
```
- Move the generated `crc32_constants.h` into this directory.
- Background on the approach: https://masterchef2209.wordpress.com/2020/06/17/guide-to-intel-sse4-2-crc-intrinisics-implementation-for-simde/
- Intel intrinsics reference: https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_crc32_u64&ig_expand=1492,1493,1559
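
For orientation, an end-to-end sketch of the regeneration flow under the layout described above (the submodule path and the copy destination are assumptions, not taken from the Makefile):

```bash
# Build the constants header inside the vendored submodule,
# using the same polynomial as the Intel SSE4.2 CRC intrinsics.
cd contrib/crc32-vpmsum
make crc32_constants.h CRC="0x11EDC6F41" OPTIONS="-x -r -c"

# Copy the generated header next to this cmake wrapper.
cp crc32_constants.h ../crc32-vpmsum-cmake/
```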

File diff suppressed because it is too large.


@ -0,0 +1,26 @@
#ifndef VEC_CRC32
#define VEC_CRC32
#if ! ((defined(__PPC64__) || defined(__powerpc64__)) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
# error PowerPC architecture is expected
#endif
/* The inline helper below uses uint32_t/uint64_t, size_t and assert(). */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
unsigned int crc32_vpmsum(unsigned int crc, const unsigned char *p, unsigned long len);
static inline uint32_t crc32_ppc(uint64_t crc, unsigned char const *buffer, size_t len)
{
assert(buffer);
crc = crc32_vpmsum(crc, buffer, (unsigned long)len);
return crc;
}
#ifdef __cplusplus
}
#endif
#endif

contrib/googletest vendored

@ -1 +1 @@
Subproject commit e7e591764baba0a0c3c9ad0014430e7a27331d16
Subproject commit 71140c3ca7a87bb1b5b9c9f1500fea8858cce344


@ -136,11 +136,6 @@ add_library(ch_contrib::uv ALIAS _uv)
target_compile_definitions(_uv PRIVATE ${uv_defines})
target_include_directories(_uv SYSTEM PUBLIC ${SOURCE_DIR}/include PRIVATE ${SOURCE_DIR}/src)
target_link_libraries(_uv ${uv_libraries})
if (NOT USE_STATIC_LIBRARIES)
target_compile_definitions(_uv
INTERFACE USING_UV_SHARED=1
PRIVATE BUILDING_UV_SHARED=1)
endif()
if(UNIX)
# Now for some gibbering horrors from beyond the stars...


@ -6,8 +6,6 @@ endif()
option (ENABLE_EMBEDDED_COMPILER "Enable support for 'compile_expressions' option for query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT})
# If USE_STATIC_LIBRARIES=0 was passed to CMake, we'll still build LLVM statically to keep complexity minimal.
if (NOT ENABLE_EMBEDDED_COMPILER)
message(STATUS "Not using LLVM")
return()

contrib/poco vendored

@ -1 +1 @@
Subproject commit 799234226187c0ae0b8c90f23465b25ed7956e56
Subproject commit 0ab9bba7ccad3c8dacce04a35cb3b78218547ab4


@ -1,4 +1,4 @@
if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG))
if (NOT OS_FREEBSD AND NOT (OS_DARWIN AND COMPILER_CLANG))
option (ENABLE_SENTRY "Enable Sentry" ${ENABLE_LIBRARIES})
else()
option (ENABLE_SENTRY "Enable Sentry" OFF)
@ -51,11 +51,7 @@ endif()
add_library(_sentry ${SRCS})
if(BUILD_SHARED_LIBS)
target_compile_definitions(_sentry PRIVATE SENTRY_BUILD_SHARED)
else()
target_compile_definitions(_sentry PUBLIC SENTRY_BUILD_STATIC)
endif()
target_compile_definitions(_sentry PUBLIC SENTRY_BUILD_STATIC)
target_link_libraries(_sentry PRIVATE ch_contrib::curl pthread)
target_include_directories(_sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src")

@ -1 +1 @@
Subproject commit 9818baa5d027ffb26d57f810dc4c597d4946781c
Subproject commit 400ad7152a0c7ee07756d96ab4f6a8f6d1080916

contrib/sysroot vendored

@ -1 +1 @@
Subproject commit 0f41651860fa4a530ecd68b93a15b8fd77397adf
Subproject commit f0081b2649b94837855f3bc7d05ef326b100bad8


@ -2,7 +2,6 @@
"docker/packager/binary": {
"name": "clickhouse/binary-builder",
"dependent": [
"docker/test/split_build_smoke_test",
"docker/test/codebrowser"
]
},
@ -55,10 +54,6 @@
"name": "clickhouse/stress-test",
"dependent": []
},
"docker/test/split_build_smoke_test": {
"name": "clickhouse/split-build-smoke-test",
"dependent": []
},
"docker/test/codebrowser": {
"name": "clickhouse/codebrowser",
"dependent": []
@ -68,10 +63,6 @@
"name": "clickhouse/integration-tests-runner",
"dependent": []
},
"docker/test/testflows/runner": {
"name": "clickhouse/testflows-runner",
"dependent": []
},
"docker/test/fasttest": {
"name": "clickhouse/fasttest",
"dependent": []


@ -22,7 +22,8 @@ RUN apt-get update && \
build-essential \
libc6 \
libc6-dev \
libc6-dev-arm64-cross && \
libc6-dev-arm64-cross \
zstd && \
apt-get clean
ENV CC=clang-${LLVM_VERSION}


@ -107,8 +107,6 @@ fi
mv ./programs/clickhouse* /output
[ -x ./programs/self-extracting/clickhouse ] && mv ./programs/self-extracting/clickhouse /output
mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
find . -name '*.so' -print -exec mv '{}' /output \;
find . -name '*.so.*' -print -exec mv '{}' /output \;
prepare_combined_output () {
local OUTPUT
@ -161,23 +159,23 @@ then
git -C "$PERF_OUTPUT"/ch log -5
(
cd "$PERF_OUTPUT"/..
tar -cv -I pigz -f /output/performance.tgz output
tar -cv --zstd -f /output/performance.tar.zst output
)
fi
# May be set for split build or for performance test.
# May be set for performance test.
if [ "" != "$COMBINED_OUTPUT" ]
then
prepare_combined_output /output
tar -cv -I pigz -f "$COMBINED_OUTPUT.tgz" /output
tar -cv --zstd -f "$COMBINED_OUTPUT.tar.zst" /output
rm -r /output/*
mv "$COMBINED_OUTPUT.tgz" /output
mv "$COMBINED_OUTPUT.tar.zst" /output
fi
if [ "coverity" == "$COMBINED_OUTPUT" ]
then
tar -cv -I pigz -f "coverity-scan.tgz" cov-int
mv "coverity-scan.tgz" /output
tar -cv --zstd -f "coverity-scan.tar.zst" cov-int
mv "coverity-scan.tar.zst" /output
fi
ccache_status
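
The hunks above replace pigz-compressed `.tgz` archives with zstandard throughout the packaging script. A minimal round-trip sketch of the two tar invocations involved (file names are illustrative):

```bash
# Pack, as the updated script does: zstd compression via tar's --zstd flag.
tar -cv --zstd -f artifacts.tar.zst /output

# Unpack on the consumer side; modern GNU tar autodetects zstd, but being
# explicit mirrors how the CI scripts extract performance.tar.zst.
tar --zstd --extract --verbose -f artifacts.tar.zst
```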


@ -100,12 +100,11 @@ def run_docker_image_with_env(
subprocess.check_call(cmd, shell=True)
def is_release_build(build_type, package_type, sanitizer, shared_libraries):
def is_release_build(build_type, package_type, sanitizer):
return (
build_type == ""
and package_type == "deb"
and sanitizer == ""
and not shared_libraries
)
@ -116,7 +115,6 @@ def parse_env_variables(
package_type,
cache,
distcc_hosts,
shared_libraries,
clang_tidy,
version,
author,
@ -218,7 +216,7 @@ def parse_env_variables(
cmake_flags.append("-DCMAKE_INSTALL_PREFIX=/usr")
cmake_flags.append("-DCMAKE_INSTALL_SYSCONFDIR=/etc")
cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var")
if is_release_build(build_type, package_type, sanitizer, shared_libraries):
if is_release_build(build_type, package_type, sanitizer):
cmake_flags.append("-DSPLIT_DEBUG_SYMBOLS=ON")
result.append("WITH_PERFORMANCE=1")
if is_cross_arm:
@ -231,12 +229,10 @@ def parse_env_variables(
cmake_flags.append(f"-DCMAKE_C_COMPILER={cc}")
cmake_flags.append(f"-DCMAKE_CXX_COMPILER={cxx}")
# Create combined output archive for shared library build and for performance tests.
# Create combined output archive for performance tests.
if package_type == "coverity":
result.append("COMBINED_OUTPUT=coverity")
result.append('COVERITY_TOKEN="$COVERITY_TOKEN"')
elif shared_libraries:
result.append("COMBINED_OUTPUT=shared_build")
if sanitizer:
result.append(f"SANITIZER={sanitizer}")
@ -285,15 +281,6 @@ def parse_env_variables(
result.append("BINARY_OUTPUT=tests")
cmake_flags.append("-DENABLE_TESTS=1")
if shared_libraries:
cmake_flags.append("-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1")
# We can't always build utils because it requires too much space, but
# we have to build them at least in some way in CI. The shared library
# build is probably the least heavy disk-wise.
cmake_flags.append("-DENABLE_UTILS=1")
# utils are not included into clickhouse-bundle, so build everything
build_target = "all"
if clang_tidy:
cmake_flags.append("-DENABLE_CLANG_TIDY=1")
cmake_flags.append("-DENABLE_TESTS=1")
@ -371,7 +358,6 @@ if __name__ == "__main__":
default="",
)
parser.add_argument("--shared-libraries", action="store_true")
parser.add_argument("--clang-tidy", action="store_true")
parser.add_argument("--cache", choices=("ccache", "distcc", ""), default="")
parser.add_argument(
@ -424,7 +410,6 @@ if __name__ == "__main__":
args.package_type,
args.cache,
args.distcc_hosts,
args.shared_libraries,
args.clang_tidy,
args.version,
args.author,


@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
# lts / testing / prestable / etc
ARG REPO_CHANNEL="stable"
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
ARG VERSION="22.12.1.1752"
ARG VERSION="22.12.3.5"
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
# user/group precreated explicitly with fixed uid/gid on purpose.


@ -21,7 +21,7 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
ARG REPO_CHANNEL="stable"
ARG REPOSITORY="deb https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
ARG VERSION="22.12.1.1752"
ARG VERSION="22.12.3.5"
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
# set non-empty deb_location_url url to create a docker image


@ -58,7 +58,7 @@ echo 'SELECT version()' | curl 'http://localhost:18123/' --data-binary @-
22.6.3.35
```
or by allowing the container to use [host ports directly](https://docs.docker.com/network/host/) using `--network=host` (also allows archiving better network performance):
or by allowing the container to use [host ports directly](https://docs.docker.com/network/host/) using `--network=host` (also allows achieving better network performance):
```bash
docker run -d --network=host --name some-clickhouse-server --ulimit nofile=262144:262144 clickhouse/clickhouse-server


@ -9,6 +9,8 @@ RUN apt-get update \
netbase \
perl \
pv \
ripgrep \
zstd \
--yes --no-install-recommends
# Sanitizer options for services (clickhouse-server)


@ -17,6 +17,7 @@ RUN apt-get update \
python3-termcolor \
unixodbc \
pv \
zstd \
--yes --no-install-recommends
# Install CMake 3.20+ for Rust compilation


@ -188,7 +188,7 @@ function build
cp programs/clickhouse "$FASTTEST_OUTPUT/clickhouse"
strip programs/clickhouse -o "$FASTTEST_OUTPUT/clickhouse-stripped"
gzip "$FASTTEST_OUTPUT/clickhouse-stripped"
zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
fi
ccache --show-stats ||:
ccache --evict-older-than 1d ||:
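
Since the stripped binary is now compressed with zstd instead of gzip, inspecting it locally changes accordingly (the artifact name follows the script above):

```bash
# Decompress the fasttest artifact; zstd keeps the .zst input by default.
zstd -d clickhouse-stripped.zst -o clickhouse-stripped
chmod +x clickhouse-stripped
```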


@ -2,6 +2,13 @@
<profiles>
<default>
<max_execution_time>10</max_execution_time>
<max_memory_usage>10G</max_memory_usage>
<!--
Otherwise we will get TOO_MANY_SIMULTANEOUS_QUERIES errors;
they are OK, but they complicate debugging.
-->
<table_function_remote_max_addresses>200</table_function_remote_max_addresses>
<!--
Don't let the fuzzer change this setting (I've actually seen it
@ -20,6 +27,10 @@
<allow_experimental_analyzer>
<readonly/>
</allow_experimental_analyzer>
<table_function_remote_max_addresses>
<max>200</max>
</table_function_remote_max_addresses>
</constraints>
</default>
</profiles>


@ -5,6 +5,7 @@ set -x
# core.COMM.PID-TID
sysctl kernel.core_pattern='core.%e.%p-%P'
dmesg --clear ||:
set -e
set -u
@ -241,13 +242,29 @@ quit
# clickhouse-client. We don't check for existence of server process, because
# the process is still present while the server is terminating and not
# accepting the connections anymore.
if clickhouse-client --query "select 1 format Null"
then
server_died=0
else
echo "Server live check returns $?"
server_died=1
fi
for _ in {1..100}
do
if clickhouse-client --query "SELECT 1" 2> err
then
server_died=0
break
else
# There are legitimate queries leading to this error, example:
# SELECT * FROM remote('127.0.0.{1..255}', system, one)
if grep -F 'TOO_MANY_SIMULTANEOUS_QUERIES' err
then
# Give it some time to cool down
clickhouse-client --query "SHOW PROCESSLIST"
sleep 1
else
echo "Server live check returns $?"
cat err
server_died=1
break
fi
fi
done
# wait in background to call wait in foreground and ensure that the
# process is alive, since w/o job control this is the only way to obtain
@ -262,14 +279,17 @@ quit
if [ "$server_died" == 1 ]
then
# The server has died.
if ! grep --text -ao "Received signal.*\|Logical error.*\|Assertion.*failed\|Failed assertion.*\|.*runtime error: .*\|.*is located.*\|SUMMARY: AddressSanitizer:.*\|SUMMARY: MemorySanitizer:.*\|SUMMARY: ThreadSanitizer:.*\|.*_LIBCPP_ASSERT.*" server.log > description.txt
if ! rg --text -o 'Received signal.*|Logical error.*|Assertion.*failed|Failed assertion.*|.*runtime error: .*|.*is located.*|(SUMMARY|ERROR): [a-zA-Z]+Sanitizer:.*|.*_LIBCPP_ASSERT.*' server.log > description.txt
then
echo "Lost connection to server. See the logs." > description.txt
fi
if grep -F --text 'Sanitizer: out-of-memory' description.txt
IS_SANITIZED=$(clickhouse-local --query "SELECT value LIKE '%-fsanitize=%' FROM system.build_options WHERE name = 'CXX_FLAGS'")
if [ "${IS_SANITIZED}" -eq "1" ] && rg --text 'Sanitizer:? (out-of-memory|out of memory|failed to allocate)|Child process was terminated by signal 9' description.txt
then
# A sanitizer OOM is not a problem we can handle - treat it as success, but preserve the description.
# Why? Sanitizers have memory overhead that is not controllable from inside clickhouse-server.
task_exit_code=0
echo "success" > status.txt
else
@ -299,18 +319,18 @@ quit
# which is confusing.
task_exit_code=$fuzzer_exit_code
echo "failure" > status.txt
{ grep --text -o "Found error:.*" fuzzer.log \
|| grep --text -ao "Exception:.*" fuzzer.log \
{ rg --text -o "Found error:.*" fuzzer.log \
|| rg --text -ao "Exception:.*" fuzzer.log \
|| echo "Fuzzer failed ($fuzzer_exit_code). See the logs." ; } \
| tail -1 > description.txt
fi
if test -f core.*; then
pigz core.*
mv core.*.gz core.gz
zstd --threads=0 core.*
mv core.*.zst core.zst
fi
dmesg -T | grep -q -F -e 'Out of memory: Killed process' -e 'oom_reaper: reaped process' -e 'oom-kill:constraint=CONSTRAINT_NONE' && echo "OOM in dmesg" ||:
dmesg -T | rg -q -F -e 'Out of memory: Killed process' -e 'oom_reaper: reaped process' -e 'oom-kill:constraint=CONSTRAINT_NONE' && echo "OOM in dmesg" ||:
}
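
The grep-to-ripgrep migration in this script is mostly a drop-in flag mapping; a sketch of the equivalences relied on above (patterns are illustrative):

```bash
grep -F 'needle' server.log          # fixed-string search
rg   -F 'needle' server.log          # same semantics in ripgrep

grep -o 'Found error:.*' fuzzer.log  # print only the matching part of a line
rg   -o 'Found error:.*' fuzzer.log

grep -h -m2 'pat' run.log            # suppress filenames, at most two matches
rg --no-filename --max-count=2 'pat' run.log
```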
case "$stage" in
@ -344,13 +364,14 @@ case "$stage" in
"report")
CORE_LINK=''
if [ -f core.gz ]; then
CORE_LINK='<a href="core.gz">core.gz</a>'
if [ -f core.zst ]; then
CORE_LINK='<a href="core.zst">core.zst</a>'
fi
grep -F '<Fatal>' server.log > fatal.log ||:
rg --text -F '<Fatal>' server.log > fatal.log ||:
dmesg -T > dmesg.log ||:
pigz server.log
zstd --threads=0 server.log
cat > report.html <<EOF ||:
<!DOCTYPE html>
@ -358,14 +379,12 @@ cat > report.html <<EOF ||:
<style>
body { font-family: "DejaVu Sans", "Noto Sans", Arial, sans-serif; background: #EEE; }
h1 { margin-left: 10px; }
th, td { border: 0; padding: 5px 10px 5px 10px; text-align: left; vertical-align: top; line-height: 1.5; background-color: #FFF;
td { white-space: pre; font-family: Monospace, Courier New; }
border: 0; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }
th, td { border: 0; padding: 5px 10px 5px 10px; text-align: left; vertical-align: top; line-height: 1.5; background-color: #FFF; }
td { white-space: pre; font-family: Monospace, Courier New; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }
a { color: #06F; text-decoration: none; }
a:hover, a:active { color: #F40; text-decoration: underline; }
table { border: 0; }
p.links a { padding: 5px; margin: 3px; background: #FFF; line-height: 2; white-space: nowrap; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }
th { cursor: pointer; }
</style>
<title>AST Fuzzer for PR #${PR_TO_TEST} @ ${SHA_TO_TEST}</title>
@ -377,8 +396,9 @@ th { cursor: pointer; }
<p class="links">
<a href="run.log">run.log</a>
<a href="fuzzer.log">fuzzer.log</a>
<a href="server.log.gz">server.log.gz</a>
<a href="server.log.zst">server.log.zst</a>
<a href="main.log">main.log</a>
<a href="dmesg.log">dmesg.log</a>
${CORE_LINK}
</p>
<table>
@ -390,12 +410,12 @@ th { cursor: pointer; }
<tr>
<td>AST Fuzzer</td>
<td>$(cat status.txt)</td>
<td style="white-space: pre;">$(
<td>$(
clickhouse-local --input-format RawBLOB --output-format RawBLOB --query "SELECT encodeXMLComponent(*) FROM table" < description.txt || cat description.txt
)</td>
</tr>
<tr>
<td colspan="3" style="white-space: pre; overflow-x: scroll;">$(
<td colspan="3" style="white-space: pre-wrap;">$(
clickhouse-local --input-format RawBLOB --output-format RawBLOB --query "SELECT encodeXMLComponent(*) FROM table" < fatal.log || cat fatal.log
)</td>
</tr>


@ -49,7 +49,7 @@ RUN arch=${TARGETARCH:-amd64} \
&& curl -o mysql-odbc.rpm "https://cdn.mysql.com/archives/mysql-connector-odbc-8.0/mysql-connector-odbc-8.0.27-1.el8.${rarch}.rpm" \
&& rpm2archive mysql-odbc.rpm \
&& tar xf mysql-odbc.rpm.tgz -C / ./usr/lib64/ \
&& LINK_DIR=$(dpkg -L libodbc1 | grep '^/usr/lib/.*-linux-gnu/odbc$') \
&& LINK_DIR=$(dpkg -L libodbc1 | rg '^/usr/lib/.*-linux-gnu/odbc$') \
&& ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR" \
&& ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR"/libmyodbc.so
@ -57,14 +57,17 @@ RUN arch=${TARGETARCH:-amd64} \
# ZooKeeper is not started by default, but consumes some space in containers.
# 777 perms used to allow anybody to start/stop ZooKeeper
ENV ZOOKEEPER_VERSION='3.6.3'
RUN curl -O "https://dlcdn.apache.org/zookeeper/zookeeper-${ZOOKEEPER_VERSION}/apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz"
RUN tar -zxvf apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz && mv apache-zookeeper-${ZOOKEEPER_VERSION}-bin /opt/zookeeper && chmod -R 777 /opt/zookeeper && rm apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz
RUN echo $'tickTime=2500 \n\
RUN curl "https://archive.apache.org/dist/zookeeper/zookeeper-${ZOOKEEPER_VERSION}/apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz" | \
tar -C opt -zxv && \
mv /opt/apache-zookeeper-${ZOOKEEPER_VERSION}-bin /opt/zookeeper && \
chmod -R 777 /opt/zookeeper && \
echo $'tickTime=2500 \n\
tickTime=2500 \n\
dataDir=/zookeeper \n\
clientPort=2181 \n\
maxClientCnxns=80' > /opt/zookeeper/conf/zoo.cfg
RUN mkdir /zookeeper && chmod -R 777 /zookeeper
maxClientCnxns=80' > /opt/zookeeper/conf/zoo.cfg && \
mkdir /zookeeper && \
chmod -R 777 /zookeeper
ENV TZ=Etc/UTC
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone


@ -8,6 +8,7 @@ RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
adduser \
ca-certificates \
bash \
btrfs-progs \
@ -83,6 +84,7 @@ RUN python3 -m pip install \
pytest \
pytest-order==1.0.0 \
pytest-timeout \
pytest-random \
pytest-xdist \
pytest-repeat \
pytz \


@ -0,0 +1,5 @@
version: '2.3'
# Used to pre-pull images with docker-compose
services:
clickhouse1:
image: clickhouse/integration-test


@ -5,10 +5,10 @@ services:
hostname: hdfs1
restart: always
expose:
- ${HDFS_NAME_PORT}
- ${HDFS_DATA_PORT}
- ${HDFS_NAME_PORT:-50070}
- ${HDFS_DATA_PORT:-50075}
entrypoint: /etc/bootstrap.sh -d
volumes:
- type: ${HDFS_FS:-tmpfs}
source: ${HDFS_LOGS:-}
target: /usr/local/hadoop/logs
target: /usr/local/hadoop/logs
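
The `${HDFS_NAME_PORT:-50070}` form used here (and in the compose files below) is standard shell-style parameter expansion, which docker-compose applies when interpolating its YAML: the literal after `:-` is substituted when the variable is unset or empty. A quick illustration in plain bash:

```bash
unset HDFS_NAME_PORT
echo "${HDFS_NAME_PORT:-50070}"   # -> 50070 (fallback used)

HDFS_NAME_PORT=51070
echo "${HDFS_NAME_PORT:-50070}"   # -> 51070 (set value wins)
```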


@ -15,7 +15,7 @@ services:
image: confluentinc/cp-kafka:5.2.0
hostname: kafka1
ports:
- ${KAFKA_EXTERNAL_PORT}:${KAFKA_EXTERNAL_PORT}
- ${KAFKA_EXTERNAL_PORT:-8081}:${KAFKA_EXTERNAL_PORT:-8081}
environment:
KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:${KAFKA_EXTERNAL_PORT},OUTSIDE://kafka1:19092
KAFKA_ADVERTISED_HOST_NAME: kafka1
@ -35,7 +35,7 @@ services:
image: confluentinc/cp-schema-registry:5.2.0
hostname: schema-registry
ports:
- ${SCHEMA_REGISTRY_EXTERNAL_PORT}:${SCHEMA_REGISTRY_INTERNAL_PORT}
- ${SCHEMA_REGISTRY_EXTERNAL_PORT:-12313}:${SCHEMA_REGISTRY_INTERNAL_PORT:-12313}
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT


@ -15,8 +15,8 @@ services:
source: ${KERBERIZED_HDFS_LOGS:-}
target: /var/log/hadoop-hdfs
expose:
- ${KERBERIZED_HDFS_NAME_PORT}
- ${KERBERIZED_HDFS_DATA_PORT}
- ${KERBERIZED_HDFS_NAME_PORT:-50070}
- ${KERBERIZED_HDFS_DATA_PORT:-1006}
depends_on:
- hdfskerberos
entrypoint: /etc/bootstrap.sh -d


@ -23,7 +23,7 @@ services:
# restart: always
hostname: kerberized_kafka1
ports:
- ${KERBERIZED_KAFKA_EXTERNAL_PORT}:${KERBERIZED_KAFKA_EXTERNAL_PORT}
- ${KERBERIZED_KAFKA_EXTERNAL_PORT:-19092}:${KERBERIZED_KAFKA_EXTERNAL_PORT:-19092}
environment:
KAFKA_LISTENERS: OUTSIDE://:19092,UNSECURED_OUTSIDE://:19093,UNSECURED_INSIDE://0.0.0.0:${KERBERIZED_KAFKA_EXTERNAL_PORT}
KAFKA_ADVERTISED_LISTENERS: OUTSIDE://kerberized_kafka1:19092,UNSECURED_OUTSIDE://kerberized_kafka1:19093,UNSECURED_INSIDE://localhost:${KERBERIZED_KAFKA_EXTERNAL_PORT}
@ -41,7 +41,7 @@ services:
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dsun.security.krb5.debug=true"
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
- ${KERBERIZED_KAFKA_DIR:-}/secrets:/etc/kafka/secrets
- /dev/urandom:/dev/random
depends_on:
- kafka_kerberized_zookeeper


@ -0,0 +1,11 @@
version: '2.3'
services:
kerberoskdc:
image: clickhouse/kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG:-latest}
hostname: kerberoskdc
volumes:
- ${KERBEROS_KDC_DIR}/secrets:/tmp/keytab
- ${KERBEROS_KDC_DIR}/../kerberos_image_config.sh:/config.sh
- /dev/urandom:/dev/random
ports: [88, 749]


@ -4,13 +4,13 @@ services:
image: getmeili/meilisearch:v0.27.0
restart: always
ports:
- ${MEILI_EXTERNAL_PORT}:${MEILI_INTERNAL_PORT}
- ${MEILI_EXTERNAL_PORT:-7700}:${MEILI_INTERNAL_PORT:-7700}
meili_secure:
image: getmeili/meilisearch:v0.27.0
restart: always
ports:
- ${MEILI_SECURE_EXTERNAL_PORT}:${MEILI_SECURE_INTERNAL_PORT}
- ${MEILI_SECURE_EXTERNAL_PORT:-7700}:${MEILI_SECURE_INTERNAL_PORT:-7700}
environment:
MEILI_MASTER_KEY: "password"


@ -9,7 +9,7 @@ services:
- data1-1:/data1
- ${MINIO_CERTS_DIR:-}:/certs
expose:
- ${MINIO_PORT}
- ${MINIO_PORT:-9001}
environment:
MINIO_ACCESS_KEY: minio
MINIO_SECRET_KEY: minio123


@ -7,11 +7,11 @@ services:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: clickhouse
ports:
- ${MONGO_EXTERNAL_PORT}:${MONGO_INTERNAL_PORT}
- ${MONGO_EXTERNAL_PORT:-27017}:${MONGO_INTERNAL_PORT:-27017}
command: --profile=2 --verbose
mongo2:
image: mongo:5.0
restart: always
ports:
- ${MONGO_NO_CRED_EXTERNAL_PORT}:${MONGO_NO_CRED_INTERNAL_PORT}
- ${MONGO_NO_CRED_EXTERNAL_PORT:-27017}:${MONGO_NO_CRED_INTERNAL_PORT:-27017}


@ -7,7 +7,7 @@ services:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: clickhouse
volumes:
- ${MONGO_CONFIG_PATH}:/mongo/
- ${MONGO_CONFIG_PATH:-}:/mongo/
ports:
- ${MONGO_EXTERNAL_PORT}:${MONGO_INTERNAL_PORT}
- ${MONGO_EXTERNAL_PORT:-27017}:${MONGO_INTERNAL_PORT:-27017}
command: --config /mongo/mongo_secure.conf --profile=2 --verbose


@ -8,7 +8,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_PORT}
- ${MYSQL_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-1.log'
--default-time-zone='+3:00'


@ -1,21 +0,0 @@
version: '2.3'
services:
mysql1:
image: mysql:5.7
restart: 'no'
environment:
MYSQL_ROOT_PASSWORD: clickhouse
ports:
- 3308:3306
command: --server_id=100 --log-bin='mysql-bin-1.log'
--default-time-zone='+3:00'
--gtid-mode="ON"
--enforce-gtid-consistency
--log-error-verbosity=3
--log-error=/var/log/mysqld/error.log
--general-log=ON
--general-log-file=/var/log/mysqld/general.log
volumes:
- type: ${MYSQL_LOGS_FS:-tmpfs}
source: ${MYSQL_LOGS:-}
target: /var/log/mysqld/


@ -8,7 +8,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL8_PORT}
- ${MYSQL8_PORT:-3306}
command: --server_id=100 --log-bin='mysql-bin-1.log'
--default_authentication_plugin='mysql_native_password'
--default-time-zone='+3:00' --gtid-mode="ON"


@ -8,7 +8,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_CLUSTER_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_CLUSTER_PORT}
- ${MYSQL_CLUSTER_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-2.log'
--default-time-zone='+3:00'
@ -30,7 +30,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_CLUSTER_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_CLUSTER_PORT}
- ${MYSQL_CLUSTER_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-3.log'
--default-time-zone='+3:00'
@ -52,7 +52,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_CLUSTER_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_CLUSTER_PORT}
- ${MYSQL_CLUSTER_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-4.log'
--default-time-zone='+3:00'


@ -3,9 +3,9 @@ services:
nats1:
image: nats
ports:
- "${NATS_EXTERNAL_PORT}:${NATS_INTERNAL_PORT}"
- "${NATS_EXTERNAL_PORT:-4444}:${NATS_INTERNAL_PORT:-4444}"
command: "-p 4444 --user click --pass house --tls --tlscert=/etc/certs/server-cert.pem --tlskey=/etc/certs/server-key.pem"
volumes:
- type: bind
source: "${NATS_CERT_DIR}/nats"
source: "${NATS_CERT_DIR:-}/nats"
target: /etc/certs


@ -5,7 +5,7 @@ services:
command: ["postgres", "-c", "wal_level=logical", "-c", "max_replication_slots=2", "-c", "logging_collector=on", "-c", "log_directory=/postgres/logs", "-c", "log_filename=postgresql.log", "-c", "log_statement=all", "-c", "max_connections=200"]
restart: always
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 10s


@ -9,7 +9,7 @@ services:
POSTGRES_PASSWORD: mysecretpassword
PGDATA: /postgres/data
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
volumes:
- type: ${POSTGRES_LOGS_FS:-tmpfs}
source: ${POSTGRES2_DIR:-}
@ -23,7 +23,7 @@ services:
POSTGRES_PASSWORD: mysecretpassword
PGDATA: /postgres/data
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
volumes:
- type: ${POSTGRES_LOGS_FS:-tmpfs}
source: ${POSTGRES3_DIR:-}
@ -37,7 +37,7 @@ services:
POSTGRES_PASSWORD: mysecretpassword
PGDATA: /postgres/data
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
volumes:
- type: ${POSTGRES_LOGS_FS:-tmpfs}
source: ${POSTGRES4_DIR:-}


@ -5,7 +5,7 @@ services:
image: rabbitmq:3.8-management-alpine
hostname: rabbitmq1
expose:
- ${RABBITMQ_PORT}
- ${RABBITMQ_PORT:-5672}
environment:
RABBITMQ_DEFAULT_USER: "root"
RABBITMQ_DEFAULT_PASS: "clickhouse"


@ -4,5 +4,5 @@ services:
image: redis
restart: always
ports:
- ${REDIS_EXTERNAL_PORT}:${REDIS_INTERNAL_PORT}
- ${REDIS_EXTERNAL_PORT:-6379}:${REDIS_INTERNAL_PORT:-6379}
command: redis-server --requirepass "clickhouse" --databases 32


@ -11,7 +11,7 @@ set -eu
for module; do
if [ "${module#-}" = "$module" ]; then
ip link show "$module" || true
lsmod | grep "$module" || true
lsmod | rg "$module" || true
fi
done


@ -37,6 +37,8 @@ RUN apt-get update \
wget \
rustc \
cargo \
ripgrep \
zstd \
&& pip3 --no-cache-dir install 'clickhouse-driver==0.2.1' scipy \
&& apt-get purge --yes python3-dev g++ \
&& apt-get autoremove --yes \


@ -193,7 +193,7 @@ function run_tests
then
# Run only explicitly specified tests, if any.
# shellcheck disable=SC2010
test_files=($(ls "$test_prefix" | grep "$CHPC_TEST_GREP" | xargs -I{} -n1 readlink -f "$test_prefix/{}"))
test_files=($(ls "$test_prefix" | rg "$CHPC_TEST_GREP" | xargs -I{} -n1 readlink -f "$test_prefix/{}"))
elif [ "$PR_TO_TEST" -ne 0 ] \
&& [ "$(wc -l < changed-test-definitions.txt)" -gt 0 ] \
&& [ "$(wc -l < other-changed-files.txt)" -eq 0 ]
@ -210,7 +210,7 @@ function run_tests
# We can filter out certain tests
if [ -v CHPC_TEST_GREP_EXCLUDE ]; then
# filter tests array in bash https://stackoverflow.com/a/40375567
filtered_test_files=( $( for i in ${test_files[@]} ; do echo $i ; done | grep -v ${CHPC_TEST_GREP_EXCLUDE} ) )
filtered_test_files=( $( for i in ${test_files[@]} ; do echo $i ; done | rg -v ${CHPC_TEST_GREP_EXCLUDE} ) )
test_files=("${filtered_test_files[@]}")
fi
@ -284,7 +284,7 @@ function run_tests
# Use awk because bash doesn't support floating point arithmetic.
profile_seconds=$(awk "BEGIN { print ($profile_seconds_left > 0 ? 10 : 0) }")
if [ "$(grep -c $(basename $test) changed-test-definitions.txt)" -gt 0 ]
if [ "$(rg -c $(basename $test) changed-test-definitions.txt)" -gt 0 ]
then
# Run all queries from changed test files to ensure that all new queries will be tested.
max_queries=0
@ -518,7 +518,7 @@ IFS=$'\n'
for prefix in $(cut -f1,2 "analyze/query-run-metrics-for-stats.tsv" | sort | uniq)
do
file="analyze/tmp/${prefix// /_}.tsv"
grep "^$prefix " "analyze/query-run-metrics-for-stats.tsv" > "$file" &
rg "^$prefix " "analyze/query-run-metrics-for-stats.tsv" > "$file" &
printf "%s\0\n" \
"clickhouse-local \
--file \"$file\" \
@ -1088,7 +1088,7 @@ do
# Build separate .svg flamegraph for each query.
# -F is somewhat unsafe because it might match somewhere other than the start
# of the string, but this is unlikely, and escaping the query for grep is a pain.
grep -F "$query " "report/stacks.$version.tsv" \
rg -F "$query " "report/stacks.$version.tsv" \
| cut -f 5- \
| sed 's/\t/ /g' \
| tee "report/tmp/$query_file.stacks.$version.tsv" \
@ -1117,7 +1117,7 @@ do
query_file=$(echo "$query" | cut -c-120 | sed 's/[/ ]/_/g')
# Ditto the above comment about -F.
grep -F "$query " "report/metric-deviation.$version.tsv" \
rg -F "$query " "report/metric-deviation.$version.tsv" \
| cut -f4- > "$query_file.$version.metrics.rep" &
done
done
@ -1132,8 +1132,8 @@ do
{
# The second grep is a heuristic for error messages like
# "socket.timeout: timed out".
grep -h -m2 -i '\(Exception\|Error\):[^:]' "$log" \
|| grep -h -m2 -i '^[^ ]\+: ' "$log" \
rg --no-filename --max-count=2 -i '\(Exception\|Error\):[^:]' "$log" \
|| rg --no-filename --max-count=2 -i '^[^ ]\+: ' "$log" \
|| head -2 "$log"
} | sed "s/^/$test\t/" >> run-errors.tsv ||:
done
@ -1180,7 +1180,7 @@ IFS=$'\n'
for prefix in $(cut -f1 "metrics/metrics.tsv" | sort | uniq)
do
file="metrics/$prefix.tsv"
grep "^$prefix " "metrics/metrics.tsv" | cut -f2- > "$file"
rg "^$prefix " "metrics/metrics.tsv" | cut -f2- > "$file"
gnuplot -e "
set datafile separator '\t';


@ -28,8 +28,8 @@ function download
# Historically there were various paths for the performance test package.
# Test all of them.
declare -a urls_to_try=(
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/$BUILD_NAME/performance.tgz"
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/performance/performance.tgz"
)
for path in "${urls_to_try[@]}"
@ -45,7 +45,7 @@ function download
# download anything, for example in some manual runs. In this case, SHAs are not set.
if ! [ "$left_sha" = "$right_sha" ]
then
wget -nv -nd -c "$left_path" -O- | tar -C left --no-same-owner --strip-components=1 -zxv &
wget -nv -nd -c "$left_path" -O- | tar -C left --no-same-owner --strip-components=1 --zstd --extract --verbose &
elif [ "$right_sha" != "" ]
then
mkdir left ||:
@ -60,7 +60,7 @@ function download
>&2 echo "Unknown dataset '$dataset_name'"
exit 1
fi
cd db0 && wget -nv -nd -c "$dataset_path" -O- | tar -xv &
cd db0 && wget -nv -nd -c "$dataset_path" -O- | tar --extract --verbose &
done
mkdir ~/fg ||:


@ -66,10 +66,8 @@ function find_reference_sha
# test all of them.
unset found
declare -a urls_to_try=(
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/$BUILD_NAME/performance.tar.zst"
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/$BUILD_NAME/performance.tgz"
# FIXME: the following link is left there for backward compatibility.
# We should remove it after 2022-11-01
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/performance/performance.tgz"
)
for path in "${urls_to_try[@]}"
do
@ -94,13 +92,13 @@ chmod 777 workspace output
cd workspace
# Download the package for the version we are going to test.
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tar.zst"
then
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tar.zst"
fi
mkdir right
wget -nv -nd -c "$right_path" -O- | tar -C right --no-same-owner --strip-components=1 -zxv
wget -nv -nd -c "$right_path" -O- | tar -C right --no-same-owner --strip-components=1 --zstd --extract --verbose
# Find reference revision if not specified explicitly
if [ "$REF_SHA" == "" ]; then find_reference_sha; fi


@ -297,6 +297,7 @@ if not args.use_existing_tables:
# Let's sync the data to avoid writeback affects performance
os.system("sync")
reportStageEnd("sync")
# By default, test all queries.
queries_to_run = range(0, len(test_queries))


@ -1,9 +0,0 @@
# rebuild in #33610
# docker build -t clickhouse/split-build-smoke-test .
ARG FROM_TAG=latest
FROM clickhouse/binary-builder:$FROM_TAG
COPY run.sh /run.sh
COPY process_split_build_smoke_test_result.py /
CMD /run.sh


@ -1,64 +0,0 @@
#!/usr/bin/env python3
import os
import logging
import argparse
import csv
RESULT_LOG_NAME = "run.log"
def process_result(result_folder):
status = "success"
description = "Server started and responded"
summary = [("Smoke test", "OK")]
with open(os.path.join(result_folder, RESULT_LOG_NAME), "r") as run_log:
lines = run_log.read().split("\n")
if not lines or lines[0].strip() != "OK":
status = "failure"
logging.info("Lines is not ok: %s", str("\n".join(lines)))
summary = [("Smoke test", "FAIL")]
description = "Server failed to respond, see result in logs"
result_logs = []
server_log_path = os.path.join(result_folder, "clickhouse-server.log")
stderr_log_path = os.path.join(result_folder, "stderr.log")
client_stderr_log_path = os.path.join(result_folder, "clientstderr.log")
if os.path.exists(server_log_path):
result_logs.append(server_log_path)
if os.path.exists(stderr_log_path):
result_logs.append(stderr_log_path)
if os.path.exists(client_stderr_log_path):
result_logs.append(client_stderr_log_path)
return status, description, summary, result_logs
def write_results(results_file, status_file, results, status):
with open(results_file, "w") as f:
out = csv.writer(f, delimiter="\t")
out.writerows(results)
with open(status_file, "w") as f:
out = csv.writer(f, delimiter="\t")
out.writerow(status)
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
parser = argparse.ArgumentParser(
description="ClickHouse script for parsing results of split build smoke test"
)
parser.add_argument("--in-results-dir", default="/test_output/")
parser.add_argument("--out-results-file", default="/test_output/test_results.tsv")
parser.add_argument("--out-status-file", default="/test_output/check_status.tsv")
args = parser.parse_args()
state, description, test_results, logs = process_result(args.in_results_dir)
logging.info("Result parsed")
status = (state, description)
write_results(args.out_results_file, args.out_status_file, test_results, status)
logging.info("Result written")


@ -1,22 +0,0 @@
#!/bin/bash
set -x
install_and_run_server() {
mkdir /unpacked
tar -xzf /package_folder/shared_build.tgz -C /unpacked --strip 1
LD_LIBRARY_PATH=/unpacked /unpacked/clickhouse-server --config /unpacked/config/config.xml >/test_output/stderr.log 2>&1 &
}
run_client() {
for i in {1..100}; do
sleep 1
LD_LIBRARY_PATH=/unpacked /unpacked/clickhouse-client --query "select 'OK'" > /test_output/run.log 2> /test_output/clientstderr.log && break
[[ $i == 100 ]] && echo 'FAIL'
done
}
install_and_run_server
run_client
mv /var/log/clickhouse-server/clickhouse-server.log /test_output/clickhouse-server.log
/process_split_build_smoke_test_result.py || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv


@ -5,12 +5,18 @@ FROM ubuntu:22.04
ARG apt_archive="http://archive.ubuntu.com"
RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
RUN apt-get update --yes && env DEBIAN_FRONTEND=noninteractive apt-get install wget unzip git default-jdk maven python3 --yes --no-install-recommends
RUN wget https://github.com/sqlancer/sqlancer/archive/master.zip -O /sqlancer.zip
RUN apt-get update --yes && \
env DEBIAN_FRONTEND=noninteractive apt-get install wget git default-jdk maven python3 --yes --no-install-recommends && \
apt-get clean
# We need to fetch the repository's HEAD each time, so we deliberately invalidate the layers' cache
ARG CACHE_INVALIDATOR=0
RUN mkdir /sqlancer && \
cd /sqlancer && \
unzip /sqlancer.zip
RUN cd /sqlancer/sqlancer-master && mvn package -DskipTests
wget -q -O- https://github.com/sqlancer/sqlancer/archive/master.tar.gz | \
tar zx -C /sqlancer && \
cd /sqlancer/sqlancer-master && \
mvn package -DskipTests && \
rm -r /root/.m2
COPY run.sh /
COPY process_sqlancer_result.py /

Some files were not shown because too many files have changed in this diff.