Merge branch 'master' into time_buckets_impl

Yarik Briukhovetskyi 2023-11-17 18:53:38 +01:00 committed by GitHub
commit 76b923b3c3
2750 changed files with 91780 additions and 67568 deletions


@@ -28,7 +28,6 @@ Checks: '*,
-bugprone-not-null-terminated-result,
-bugprone-reserved-identifier, # useful but too slow, TODO retry when https://reviews.llvm.org/rG1c282052624f9d0bd273bde0b47b30c96699c6c7 is merged
-bugprone-unchecked-optional-access,
-bugprone-*, -- category temporarily disabled because some check(s) in it are slow
-cert-dcl16-c,
-cert-dcl37-c,
@@ -39,40 +38,9 @@ Checks: '*,
-cert-oop54-cpp,
-cert-oop57-cpp,
-clang-analyzer-optin.performance.Padding,
-clang-analyzer-optin.portability.UnixAPI,
-clang-analyzer-security.insecureAPI.bzero,
-clang-analyzer-security.insecureAPI.strcpy,
-clang-analyzer-*, -- category temporarily disabled because some check(s) in it are slow
-clang-analyzer-unix.Malloc,
-cppcoreguidelines-avoid-c-arrays,
-cppcoreguidelines-avoid-const-or-ref-data-members,
-cppcoreguidelines-avoid-do-while,
-cppcoreguidelines-avoid-goto,
-cppcoreguidelines-avoid-magic-numbers,
-cppcoreguidelines-avoid-non-const-global-variables,
-cppcoreguidelines-explicit-virtual-functions,
-cppcoreguidelines-init-variables,
-cppcoreguidelines-interfaces-global-init,
-cppcoreguidelines-macro-usage,
-cppcoreguidelines-narrowing-conversions,
-cppcoreguidelines-no-malloc,
-cppcoreguidelines-non-private-member-variables-in-classes,
-cppcoreguidelines-owning-memory,
-cppcoreguidelines-prefer-member-initializer,
-cppcoreguidelines-pro-bounds-array-to-pointer-decay,
-cppcoreguidelines-pro-bounds-constant-array-index,
-cppcoreguidelines-pro-bounds-pointer-arithmetic,
-cppcoreguidelines-pro-type-const-cast,
-cppcoreguidelines-pro-type-cstyle-cast,
-cppcoreguidelines-pro-type-member-init,
-cppcoreguidelines-pro-type-reinterpret-cast,
-cppcoreguidelines-pro-type-static-cast-downcast,
-cppcoreguidelines-pro-type-union-access,
-cppcoreguidelines-pro-type-vararg,
-cppcoreguidelines-slicing,
-cppcoreguidelines-special-member-functions,
-cppcoreguidelines-*, -- category temporarily disabled because some check(s) in it are slow
-cppcoreguidelines-*, # impractical in a codebase as large as ClickHouse, also slow
-darwin-*,
@@ -84,7 +52,6 @@ Checks: '*,
-google-readability-function-size,
-google-readability-namespace-comments,
-google-readability-todo,
-google-upgrade-googletest-case,
-hicpp-avoid-c-arrays,
-hicpp-avoid-goto,


@@ -7,6 +7,8 @@ assignees: ''
---
> Please make sure that the version you're using is still supported (you can find the list [here](https://github.com/ClickHouse/ClickHouse/blob/master/SECURITY.md#scope-and-supported-versions)).
> You have to provide the following information whenever possible.
**Describe what's wrong**

.github/actions/clean/action.yml (vendored, new file, 11 lines added)

@@ -0,0 +1,11 @@
name: Clean runner
description: Clean the runner's temp path on ending
runs:
using: "composite"
steps:
- name: Clean
shell: bash
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "${{runner.temp}}"
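This composite action is consumed by the reusable workflows introduced later in this diff; a minimal caller sketch (the job name and earlier steps are illustrative) is:

  ExampleJob:
    runs-on: [self-hosted, style-checker]
    steps:
      # ... build or test steps ...
      - name: Clean
        if: always()                    # run the cleanup even if earlier steps failed
        uses: ./.github/actions/clean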

.github/actions/common_setup/action.yml (vendored, new file, 35 lines added)

@@ -0,0 +1,35 @@
name: Common setup
description: Setup necessary environments
inputs:
job_type:
description: the name to use in the TEMP_PATH and REPO_COPY
default: common
type: string
nested_job:
description: the fuse for unintended use inside of the reusable callable jobs
default: true
type: boolean
runs:
using: "composite"
steps:
- name: Setup and check ENV
shell: bash
run: |
echo "Setup the common ENV variables"
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/${{inputs.job_type}}
REPO_COPY=${{runner.temp}}/${{inputs.job_type}}/git-repo-copy
IMAGES_PATH=${{runner.temp}}/images_path
REPORTS_PATH=${{runner.temp}}/reports_dir
EOF
if [ -z "${{env.GITHUB_JOB_OVERRIDDEN}}" ] && [ "true" == "${{inputs.nested_job}}" ]; then
echo "The GITHUB_JOB_OVERRIDDEN ENV is unset, and must be set for the nested jobs"
exit 1
fi
- name: Setup $TEMP_PATH
shell: bash
run: |
# to remove any leftovers
sudo rm -fr "$TEMP_PATH"
mkdir -p "$REPO_COPY"
cp -a "$GITHUB_WORKSPACE"/. "$REPO_COPY"/
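In the reusable workflows added later in this diff, this action is called right after checkout, and the calling job exports GITHUB_JOB_OVERRIDDEN so that the "Setup and check ENV" guard above passes; a sketch (the job name is illustrative):

  SomeNestedJob:
    env:
      GITHUB_JOB_OVERRIDDEN: SomeNestedJob   # required while nested_job keeps its default of true
    runs-on: [self-hosted, builder]
    steps:
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
      - name: Common setup
        uses: ./.github/actions/common_setup
        with:
          job_type: build_check              # becomes a path component of TEMP_PATH and REPO_COPY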


@@ -1,3 +1,4 @@
# yamllint disable rule:comments-indentation
name: BackportPR
env:
@@ -33,7 +34,12 @@ jobs:
- name: Python unit tests
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 -m unittest discover -s . -p '*_test.py'
echo "Testing the main ci directory"
python3 -m unittest discover -s . -p 'test_*.py'
for dir in *_lambda/; do
echo "Testing $dir"
python3 -m unittest discover -s "$dir" -p 'test_*.py'
done
DockerHubPushAarch64:
runs-on: [self-hosted, style-checker-aarch64]
needs: CheckLabels
@@ -69,7 +75,7 @@ jobs:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
DockerHubPush:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
needs: [DockerHubPushAmd64, DockerHubPushAarch64, PythonUnitTests]
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
@@ -77,6 +83,7 @@ jobs:
with:
clear-repository: true
fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags
filter: tree:0
- name: Download changed aarch64 images
uses: actions/download-artifact@v3
with:
@@ -98,367 +105,64 @@ jobs:
path: ${{ runner.temp }}/changed_images.json
CompatibilityCheckX86:
needs: [BuilderDebRelease]
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/compatibility_check
REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
REPORTS_PATH=${{runner.temp}}/reports_dir
EOF
- name: Check out repository code
uses: ClickHouse/checkout@v1
uses: ./.github/workflows/reusable_test.yml
with:
clear-repository: true
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: CompatibilityCheckX86
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
test_name: Compatibility check X86
runner_type: style-checker
run_command: |
cd "$REPO_COPY/tests/ci"
python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
CompatibilityCheckAarch64:
needs: [BuilderDebAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/compatibility_check
REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
REPORTS_PATH=${{runner.temp}}/reports_dir
EOF
- name: Check out repository code
uses: ClickHouse/checkout@v1
uses: ./.github/workflows/reusable_test.yml
with:
clear-repository: true
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: CompatibilityCheckAarch64
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
test_name: Compatibility check Aarch64
runner_type: style-checker
run_command: |
cd "$REPO_COPY/tests/ci"
python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
BuilderDebRelease:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=package_release
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # For a proper version and performance artifacts
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: package_release
checkout_depth: 0
BuilderDebAarch64:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=package_aarch64
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # For a proper version and performance artifacts
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: package_aarch64
checkout_depth: 0
BuilderDebAsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=package_asan
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: package_asan
BuilderDebTsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=package_tsan
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: package_tsan
BuilderDebDebug:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=package_debug
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Apply sparse checkout for contrib # in order to check that it doesn't break build
run: |
rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
"$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
du -hs "$GITHUB_WORKSPACE/contrib" ||:
find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: package_debug
BuilderBinDarwin:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=binary_darwin
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: binary_darwin
checkout_depth: 0
BuilderBinDarwinAarch64:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=binary_darwin_aarch64
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: binary_darwin_aarch64
checkout_depth: 0
############################################################################################
##################################### Docker images #######################################
############################################################################################
@@ -473,6 +177,7 @@ jobs:
with:
clear-repository: true
fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
filter: tree:0
- name: Check docker clickhouse/clickhouse-server building
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
@@ -490,303 +195,114 @@ jobs:
##################################### BUILD REPORTER #######################################
############################################################################################
BuilderReport:
if: ${{ success() || failure() }}
needs:
- BuilderDebRelease
- BuilderDebAarch64
- BuilderDebAsan
- BuilderDebTsan
- BuilderDebDebug
runs-on: [self-hosted, style-checker]
if: ${{ success() || failure() }}
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
CHECK_NAME=ClickHouse build check
REPORTS_PATH=${{runner.temp}}/reports_dir
TEMP_PATH=${{runner.temp}}/report_check
NEEDS_DATA_PATH=${{runner.temp}}/needs.json
EOF
- name: Download json reports
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_test.yml
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Report Builder
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cat > "$NEEDS_DATA_PATH" << 'EOF'
test_name: ClickHouse build check
runner_type: style-checker
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
EOF
NDENV
run_command: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 build_report_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
BuilderSpecialReport:
if: ${{ success() || failure() }}
needs:
- BuilderBinDarwin
- BuilderBinDarwinAarch64
runs-on: [self-hosted, style-checker]
if: ${{ success() || failure() }}
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/report_check
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=ClickHouse special build check
NEEDS_DATA_PATH=${{runner.temp}}/needs.json
EOF
- name: Download json reports
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_test.yml
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Report Builder
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cat > "$NEEDS_DATA_PATH" << 'EOF'
test_name: ClickHouse special build check
runner_type: style-checker
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
EOF
NDENV
run_command: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 build_report_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
############################################################################################
#################################### INSTALL PACKAGES ######################################
############################################################################################
InstallPackagesTestRelease:
needs: [BuilderDebRelease]
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/test_install
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=Install packages (amd64)
REPO_COPY=${{runner.temp}}/test_install/ClickHouse
EOF
- name: Download json reports
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_test.yml
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Test packages installation
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
test_name: Install packages (amd64)
runner_type: style-checker
run_command: |
cd "$REPO_COPY/tests/ci"
python3 install_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
InstallPackagesTestAarch64:
needs: [BuilderDebAarch64]
runs-on: [self-hosted, style-checker-aarch64]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/test_install
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=Install packages (arm64)
REPO_COPY=${{runner.temp}}/test_install/ClickHouse
EOF
- name: Download json reports
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_test.yml
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Test packages installation
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
test_name: Install packages (arm64)
runner_type: style-checker-aarch64
run_command: |
cd "$REPO_COPY/tests/ci"
python3 install_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
##############################################################################################
########################### FUNCTIONAL STATELESS TESTS #######################################
##############################################################################################
FunctionalStatelessTestAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/stateless_debug
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=Stateless tests (asan)
REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (asan)
runner_type: func-tester
additional_envs: |
KILL_TIMEOUT=10800
EOF
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Functional test
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
run_command: |
cd "$REPO_COPY/tests/ci"
python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
##############################################################################################
############################ FUNCTIONAL STATEFUL TESTS #######################################
##############################################################################################
FunctionalStatefulTestDebug:
needs: [BuilderDebDebug]
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/stateful_debug
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=Stateful tests (debug)
REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (debug)
runner_type: func-tester
additional_envs: |
KILL_TIMEOUT=3600
EOF
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Functional test
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
run_command: |
cd "$REPO_COPY/tests/ci"
python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
##############################################################################################
######################################### STRESS TESTS #######################################
##############################################################################################
StressTestTsan:
needs: [BuilderDebTsan]
# func testers have 16 cores + 128 GB memory
# while stress testers have 36 cores + 72 GB memory
# It would be better to have something like 32 + 128,
# but such servers are almost unavailable as spot instances.
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/stress_thread
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=Stress test (tsan)
REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
EOF
- name: Download json reports
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_test.yml
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Stress test
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
test_name: Stress test (tsan)
runner_type: stress-tester
run_command: |
cd "$REPO_COPY/tests/ci"
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
#############################################################################################
############################# INTEGRATION TESTS #############################################
#############################################################################################
IntegrationTestsRelease:
needs: [BuilderDebRelease]
runs-on: [self-hosted, stress-tester]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/integration_tests_release
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=Integration tests (release)
REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
EOF
- name: Download json reports
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_test.yml
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Integration test
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
test_name: Integration tests (release)
runner_type: stress-tester
batches: 4
run_command: |
cd "$REPO_COPY/tests/ci"
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
FinishCheck:
needs:
- DockerHubPush


@@ -17,6 +17,8 @@ on: # yamllint disable-line rule:truthy
- 'docker/docs/**'
- 'docs/**'
- 'utils/check-style/aspell-ignore/**'
- 'tests/ci/docs_check.py'
- '.github/workflows/docs_check.yml'
jobs:
CheckLabels:
runs-on: [self-hosted, style-checker]
@@ -72,6 +74,7 @@ jobs:
with:
clear-repository: true
fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags
filter: tree:0
- name: Download changed aarch64 images
uses: actions/download-artifact@v3
with:
@@ -93,68 +96,30 @@ jobs:
path: ${{ runner.temp }}/changed_images.json
StyleCheck:
needs: DockerHubPush
runs-on: [self-hosted, style-checker]
if: ${{ success() || failure() }}
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{ runner.temp }}/style_check
# We need the additional `&& ! cancelled()` so that the job can still be cancelled
if: ${{ success() || failure() || ( always() && ! cancelled() ) }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Style check
runner_type: style-checker
run_command: |
cd "$REPO_COPY/tests/ci"
python3 style_check.py
secrets:
secret_envs: |
ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
EOF
- name: Download changed images
# even if artifact does not exist, e.g. on `do not test` label or failed Docker job
continue-on-error: true
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Style Check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 style_check.py
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
DocsCheck:
needs: DockerHubPush
runs-on: [self-hosted, func-tester-aarch64]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/docs_check
REPO_COPY=${{runner.temp}}/docs_check/ClickHouse
EOF
- name: Download changed images
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_test.yml
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Docs Check
run: |
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
test_name: Docs check
runner_type: func-tester-aarch64
additional_envs: |
run_command: |
cd "$REPO_COPY/tests/ci"
python3 docs_check.py
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
FinishCheck:
needs:
- StyleCheck


@@ -11,58 +11,19 @@ on: # yamllint disable-line rule:truthy
workflow_call:
jobs:
KeeperJepsenRelease:
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/keeper_jepsen
REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
EOF
- name: Check out repository code
uses: ClickHouse/checkout@v1
uses: ./.github/workflows/reusable_test.yml
with:
clear-repository: true
fetch-depth: 0
- name: Jepsen Test
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
test_name: Jepsen keeper check
runner_type: style-checker
run_command: |
cd "$REPO_COPY/tests/ci"
python3 jepsen_check.py keeper
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"
# ServerJepsenRelease:
# runs-on: [self-hosted, style-checker]
# if: ${{ always() }}
# needs: [KeeperJepsenRelease]
# steps:
# - name: Set envs
# run: |
# cat >> "$GITHUB_ENV" << 'EOF'
# TEMP_PATH=${{runner.temp}}/server_jepsen
# REPO_COPY=${{runner.temp}}/server_jepsen/ClickHouse
# EOF
# - name: Check out repository code
# uses: ClickHouse/checkout@v1
# uses: ./.github/workflows/reusable_test.yml
# with:
# clear-repository: true
# fetch-depth: 0
# - name: Jepsen Test
# run: |
# sudo rm -fr "$TEMP_PATH"
# mkdir -p "$TEMP_PATH"
# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
# test_name: Jepsen server check
# runner_type: style-checker
# run_command: |
# cd "$REPO_COPY/tests/ci"
# python3 jepsen_check.py server
# - name: Cleanup
# if: always()
# run: |
# docker ps --quiet | xargs --no-run-if-empty docker kill ||:
# docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
# sudo rm -fr "$TEMP_PATH"


@@ -10,86 +10,17 @@ on: # yamllint disable-line rule:truthy
workflow_call:
jobs:
BuilderFuzzers:
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
BUILD_NAME=fuzzers
EOF
- name: Download changed images
# even if artifact does not exist, e.g. on `do not test` label or failed Docker job
continue-on-error: true
uses: actions/download-artifact@v3
uses: ./.github/workflows/reusable_build.yml
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
ref: ${{github.ref}}
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
build_name: fuzzers
libFuzzerTest:
needs: [BuilderFuzzers]
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/libfuzzer
REPORTS_PATH=${{runner.temp}}/reports_dir
CHECK_NAME=libFuzzer tests
REPO_COPY=${{runner.temp}}/libfuzzer/ClickHouse
uses: ./.github/workflows/reusable_test.yml
with:
test_name: libFuzzer tests
runner_type: func-tester
additional_envs: |
KILL_TIMEOUT=10800
EOF
- name: Download changed images
# even if artifact does not exist, e.g. on `do not test` label or failed Docker job
continue-on-error: true
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: libFuzzer test
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
run_command: |
cd "$REPO_COPY/tests/ci"
python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"

File diff suppressed because it is too large.


@@ -54,6 +54,7 @@ jobs:
with:
clear-repository: true
fetch-depth: 0 # to find ancestor merge commits necessary for finding proper docker tags
filter: tree:0
- name: Download changed aarch64 images
uses: actions/download-artifact@v3
with:
@@ -73,9 +74,6 @@ jobs:
with:
name: changed_images
path: ${{ runner.temp }}/changed_images.json
Codebrowser:
needs: [DockerHubPush]
uses: ./.github/workflows/woboq.yml
SonarCloud:
runs-on: [self-hosted, builder]
env:
@@ -90,6 +88,7 @@ jobs:
with:
clear-repository: true
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
filter: tree:0
submodules: true
- name: Set up JDK 11
uses: actions/setup-java@v1

File diff suppressed because it is too large.


@@ -49,6 +49,7 @@ jobs:
with:
clear-repository: true
fetch-depth: 0 # otherwise we will have no version info
filter: tree:0
ref: ${{ env.GITHUB_TAG }}
- name: Check docker clickhouse/clickhouse-server building
run: |

File diff suppressed because it is too large.

.github/workflows/reusable_build.yml (vendored, new file, 79 lines added)

@@ -0,0 +1,79 @@
### For the pure soul who wishes to move it to another place
# https://github.com/orgs/community/discussions/9050
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
name: Build ClickHouse
'on':
workflow_call:
inputs:
build_name:
description: the value of build type from tests/ci/ci_config.py
required: true
type: string
checkout_depth:
description: the value of the git shallow checkout
required: false
type: number
default: 1
runner_type:
description: the label of runner to use
default: builder
type: string
additional_envs:
description: additional ENV variables to setup the job
type: string
jobs:
Build:
name: Build-${{inputs.build_name}}
env:
GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}}
runs-on: [self-hosted, '${{inputs.runner_type}}']
steps:
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: ${{inputs.checkout_depth}}
filter: tree:0
- name: Set build envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
${{inputs.additional_envs}}
EOF
python3 "$GITHUB_WORKSPACE"/tests/ci/ci_config.py --build-name "${{inputs.build_name}}" >> "$GITHUB_ENV"
- name: Apply sparse checkout for contrib # in order to check that it doesn't break build
# This step is done in GITHUB_WORKSPACE,
# because it's broken in REPO_COPY for some reason
if: ${{ env.BUILD_SPARSE_CHECKOUT == 'true' }}
run: |
rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed'
git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored'
"$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK'
du -hs "$GITHUB_WORKSPACE/contrib" ||:
find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||:
- name: Common setup
uses: ./.github/actions/common_setup
with:
job_type: build_check
- name: Download changed images
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Build
run: |
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
- name: Clean
if: always()
uses: ./.github/actions/clean
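With this reusable workflow in place, each builder job in the caller workflows earlier in this diff collapses to a single `uses:` entry, as BuilderDebRelease does above; a representative caller looks like:

  BuilderDebRelease:
    needs: [DockerHubPush]
    uses: ./.github/workflows/reusable_build.yml
    with:
      build_name: package_release   # must match a build name in tests/ci/ci_config.py
      checkout_depth: 0             # full history, needed for version and performance artifacts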

.github/workflows/reusable_test.yml (vendored, new file, 113 lines added)

@@ -0,0 +1,113 @@
### For the pure soul who wishes to move it to another place
# https://github.com/orgs/community/discussions/9050
name: Testing workflow
'on':
workflow_call:
inputs:
test_name:
description: the value of test type from tests/ci/ci_config.py, ends up as $CHECK_NAME ENV
required: true
type: string
runner_type:
description: the label of runner to use
required: true
type: string
run_command:
description: the command to launch the check. Usually starts with `cd '$REPO_COPY/tests/ci'`
required: true
type: string
batches:
description: how many batches for the test will be launched
default: 1
type: number
checkout_depth:
description: the value of the git shallow checkout
required: false
type: number
default: 1
submodules:
description: if the submodules should be checked out
required: false
type: boolean
default: false
additional_envs:
description: additional ENV variables to setup the job
type: string
secrets:
secret_envs:
description: if given, it's passed to the environments
required: false
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
CHECK_NAME: ${{inputs.test_name}}
jobs:
PrepareStrategy:
# batches < 1 is misconfiguration,
# and we need this step only for batches > 1
if: ${{ inputs.batches > 1 }}
runs-on: [self-hosted, style-checker-aarch64]
outputs:
batches: ${{steps.batches.outputs.batches}}
steps:
- name: Calculate batches
id: batches
run: |
batches_output=$(python3 -c 'import json; print(json.dumps(list(range(${{inputs.batches}}))))')
echo "batches=${batches_output}" >> "$GITHUB_OUTPUT"
Test:
# If PrepareStrategy is skipped for batches == 1,
# we still need to launch the test.
# `! failure()` is mandatory here to launch on skipped Job
# `&& !cancelled()` to keep the job cancellable
if: ${{ ( !failure() && !cancelled() ) && inputs.batches > 0 }}
# Do not add `-0` to the end, if there's only one batch
name: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }}
env:
GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }}
runs-on: [self-hosted, '${{inputs.runner_type}}']
needs: [PrepareStrategy]
strategy:
fail-fast: false # we always wait for entire matrix
matrix:
# if PrepareStrategy does not have batches, we use 0
batch: ${{ needs.PrepareStrategy.outputs.batches
&& fromJson(needs.PrepareStrategy.outputs.batches)
|| fromJson('[0]')}}
steps:
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: ${{inputs.submodules}}
fetch-depth: ${{inputs.checkout_depth}}
filter: tree:0
- name: Set build envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
${{inputs.additional_envs}}
${{secrets.secret_envs}}
EOF
- name: Common setup
uses: ./.github/actions/common_setup
with:
job_type: test
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Setup batch
if: ${{ inputs.batches > 1}}
run: |
cat >> "$GITHUB_ENV" << 'EOF'
RUN_BY_HASH_NUM=${{matrix.batch}}
RUN_BY_HASH_TOTAL=${{inputs.batches}}
EOF
- name: Run test
run: ${{inputs.run_command}}
- name: Clean
if: always()
uses: ./.github/actions/clean
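Callers pass the check name, runner label and command. When `batches` is greater than 1, PrepareStrategy emits a JSON list (for example `[0, 1, 2, 3]` for `batches: 4`) that the Test job fans out over as a matrix, exporting RUN_BY_HASH_NUM and RUN_BY_HASH_TOTAL to each batch and suffixing the job names with `-0` through `-3`. The integration-test job earlier in this diff is a representative caller:

  IntegrationTestsRelease:
    needs: [BuilderDebRelease]
    uses: ./.github/workflows/reusable_test.yml
    with:
      test_name: Integration tests (release)   # exported to the check as $CHECK_NAME
      runner_type: stress-tester
      batches: 4
      run_command: |
        cd "$REPO_COPY/tests/ci"
        python3 integration_test_check.py "$CHECK_NAME"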


@@ -38,6 +38,7 @@ jobs:
with:
ref: master
fetch-depth: 0
filter: tree:0
- name: Update versions, docker version, changelog, security
env:
GITHUB_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}


@@ -1,44 +0,0 @@
name: WoboqBuilder
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
concurrency:
group: woboq
on: # yamllint disable-line rule:truthy
workflow_dispatch:
workflow_call:
jobs:
# don't use dockerhub push because this image updates so rarely
WoboqCodebrowser:
runs-on: [self-hosted, style-checker]
timeout-minutes: 420 # the task is pretty heavy, so there's an additional hour
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/codebrowser
REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse
IMAGES_PATH=${{runner.temp}}/images_path
EOF
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: 'true'
- name: Download json reports
uses: actions/download-artifact@v3
with:
path: ${{ env.IMAGES_PATH }}
- name: Codebrowser
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py
- name: Cleanup
if: always()
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
sudo rm -fr "$TEMP_PATH"

.gitmodules (vendored, 8 lines changed)

@@ -1,3 +1,6 @@
# Please do not use 'branch = ...' tags with submodule entries. Such tags make updating submodules a
# little bit more convenient but they do *not* specify the tracked submodule branch. Thus, they are
# more confusing than useful.
[submodule "contrib/zstd"]
path = contrib/zstd
url = https://github.com/facebook/zstd
@@ -184,7 +187,7 @@
url = https://github.com/ClickHouse/nanodbc
[submodule "contrib/datasketches-cpp"]
path = contrib/datasketches-cpp
url = https://github.com/ClickHouse/datasketches-cpp
url = https://github.com/apache/datasketches-cpp
[submodule "contrib/yaml-cpp"]
path = contrib/yaml-cpp
url = https://github.com/ClickHouse/yaml-cpp
@@ -257,6 +260,9 @@
[submodule "contrib/corrosion"]
path = contrib/corrosion
url = https://github.com/corrosion-rs/corrosion
[submodule "contrib/libssh"]
path = contrib/libssh
url = https://github.com/ClickHouse/libssh.git
[submodule "contrib/morton-nd"]
path = contrib/morton-nd
url = https://github.com/morton-nd/morton-nd


@@ -1,4 +1,6 @@
### Table of Contents
**[ClickHouse release v23.10, 2023-11-02](#2310)**<br/>
**[ClickHouse release v23.9, 2023-09-28](#239)**<br/>
**[ClickHouse release v23.8 LTS, 2023-08-31](#238)**<br/>
**[ClickHouse release v23.7, 2023-07-27](#237)**<br/>
**[ClickHouse release v23.6, 2023-06-30](#236)**<br/>
@@ -11,6 +13,351 @@
# 2023 Changelog
### ClickHouse release 23.10, 2023-11-02
#### Backward Incompatible Change
* There is no longer an option to automatically remove broken data parts. This closes [#55174](https://github.com/ClickHouse/ClickHouse/issues/55174). [#55184](https://github.com/ClickHouse/ClickHouse/pull/55184) ([Alexey Milovidov](https://github.com/alexey-milovidov)). [#55557](https://github.com/ClickHouse/ClickHouse/pull/55557) ([Jihyuk Bok](https://github.com/tomahawk28)).
* The obsolete in-memory data parts can no longer be read from the write-ahead log. If you have configured in-memory parts before, they have to be removed before the upgrade. [#55186](https://github.com/ClickHouse/ClickHouse/pull/55186) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Remove the integration with Meilisearch. Reason: it was compatible only with the old version 0.18. The recent version of Meilisearch changed the protocol and does not work anymore. Note: we would appreciate it if you help to return it back. [#55189](https://github.com/ClickHouse/ClickHouse/pull/55189) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Rename directory monitor concept into background INSERT. All the settings `*directory_monitor*` had been renamed to `distributed_background_insert*`. *Backward compatibility should be preserved* (since old settings had been added as an alias). [#55978](https://github.com/ClickHouse/ClickHouse/pull/55978) ([Azat Khuzhin](https://github.com/azat)).
* Do not interpret the `send_timeout` set on the client side as the `receive_timeout` on the server side and vice versa. [#56035](https://github.com/ClickHouse/ClickHouse/pull/56035) ([Azat Khuzhin](https://github.com/azat)).
* Comparison of time intervals with different units will throw an exception. This closes [#55942](https://github.com/ClickHouse/ClickHouse/issues/55942). You might have occasionally relied on the previous behavior, in which the underlying numeric values were compared regardless of the units. [#56090](https://github.com/ClickHouse/ClickHouse/pull/56090) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Rewrote the experimental `S3Queue` table engine completely: changed the way we keep information in ZooKeeper, which allows making fewer ZooKeeper requests; added caching of the ZooKeeper state in cases when we know the state will not change; made the polling of S3 less aggressive; changed the way the TTL and the maximum set of tracked files are maintained (now it is a background process). Added `system.s3queue` and `system.s3queue_log` tables. Closes [#54998](https://github.com/ClickHouse/ClickHouse/issues/54998). [#54422](https://github.com/ClickHouse/ClickHouse/pull/54422) ([Kseniia Sumarokova](https://github.com/kssenii)).
#### New Feature
* Add function `arrayFold(accumulator, x1, ..., xn -> expression, initial, array1, ..., arrayn)` which applies a lambda function to multiple arrays of the same cardinality and collects the result in an accumulator. [#49794](https://github.com/ClickHouse/ClickHouse/pull/49794) ([Lirikl](https://github.com/Lirikl)).
* Support for `Npy` format. `SELECT * FROM file('example_array.npy', Npy)`. [#55982](https://github.com/ClickHouse/ClickHouse/pull/55982) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* If a table has a space-filling curve in its key, e.g., `ORDER BY mortonEncode(x, y)`, the conditions on its arguments, e.g., `x >= 10 AND x <= 20 AND y >= 20 AND y <= 30` can be used for indexing. A setting `analyze_index_with_space_filling_curves` is added to enable or disable this analysis. This closes [#41195](https://github.com/ClickHouse/ClickHouse/issue/41195). Continuation of [#4538](https://github.com/ClickHouse/ClickHouse/pull/4538). Continuation of [#6286](https://github.com/ClickHouse/ClickHouse/pull/6286). Continuation of [#28130](https://github.com/ClickHouse/ClickHouse/pull/28130). Continuation of [#41753](https://github.com/ClickHouse/ClickHouse/pull/#41753). [#55642](https://github.com/ClickHouse/ClickHouse/pull/55642) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Added a new setting, `force_optimize_projection_name`, which takes the name of a projection as an argument. If its value is set to a non-empty string, ClickHouse checks that this projection is used in the query at least once. Closes [#55331](https://github.com/ClickHouse/ClickHouse/issues/55331). [#56134](https://github.com/ClickHouse/ClickHouse/pull/56134) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Support asynchronous inserts with external data via the native protocol. Previously this worked only if the data was inlined into the query. [#54730](https://github.com/ClickHouse/ClickHouse/pull/54730) ([Anton Popov](https://github.com/CurtizJ)).
* Added aggregation function `lttb` which uses the [Largest-Triangle-Three-Buckets](https://skemman.is/bitstream/1946/15343/3/SS_MSthesis.pdf) algorithm for downsampling data for visualization. [#53145](https://github.com/ClickHouse/ClickHouse/pull/53145) ([Sinan](https://github.com/sinsinan)).
* Query `CHECK TABLE` has better performance and usability (sends progress updates, cancellable). Support checking a particular part with `CHECK TABLE ... PART 'part_name'`. [#53404](https://github.com/ClickHouse/ClickHouse/pull/53404) ([vdimir](https://github.com/vdimir)).
* Added function `jsonMergePatch`. When working with JSON data as strings, it provides a way to merge these strings (of JSON objects) together to form a single string containing a single JSON object. [#54364](https://github.com/ClickHouse/ClickHouse/pull/54364) ([Memo](https://github.com/Joeywzr)).
* The second part of Kusto Query Language dialect support. [Phase 1 implementation ](https://github.com/ClickHouse/ClickHouse/pull/37961) has been merged. [#42510](https://github.com/ClickHouse/ClickHouse/pull/42510) ([larryluogit](https://github.com/larryluogit)).
* Added a new SQL function, `arrayRandomSample(arr, k)` which returns a sample of k elements from the input array. Similar functionality could previously be achieved only with less convenient syntax, e.g. "SELECT arrayReduce('groupArraySample(3)', range(10))". [#54391](https://github.com/ClickHouse/ClickHouse/pull/54391) ([itayisraelov](https://github.com/itayisraelov)).
* Introduce `-ArgMin`/`-ArgMax` aggregate combinators which allow aggregating by min/max values only. One use case can be found in [#54818](https://github.com/ClickHouse/ClickHouse/issues/54818). This PR also reorganizes combinators into a dedicated folder. [#54947](https://github.com/ClickHouse/ClickHouse/pull/54947) ([Amos Bird](https://github.com/amosbird)).
* Allow to drop cache for Protobuf format with `SYSTEM DROP SCHEMA FORMAT CACHE [FOR Protobuf]`. [#55064](https://github.com/ClickHouse/ClickHouse/pull/55064) ([Aleksandr Musorin](https://github.com/AVMusorin)).
* Add external HTTP Basic authenticator. [#55199](https://github.com/ClickHouse/ClickHouse/pull/55199) ([Aleksei Filatov](https://github.com/aalexfvk)).
* Added function `byteSwap` which reverses the bytes of unsigned integers. This is particularly useful for reversing values of types which are represented as unsigned integers internally such as IPv4. [#55211](https://github.com/ClickHouse/ClickHouse/pull/55211) ([Priyansh Agrawal](https://github.com/Priyansh121096)).
* Added function `formatQuery()` which returns a formatted version (possibly spanning multiple lines) of a SQL query string. Also added function `formatQuerySingleLine()` which does the same but the returned string will not contain linebreaks. [#55239](https://github.com/ClickHouse/ClickHouse/pull/55239) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Added `DWARF` input format that reads debug symbols from an ELF executable/library/object file. [#55450](https://github.com/ClickHouse/ClickHouse/pull/55450) ([Michael Kolupaev](https://github.com/al13n321)).
* Allow saving unparsed records and errors in the RabbitMQ, NATS and FileLog engines. Add virtual columns `_error` and `_raw_message` (for NATS and RabbitMQ) and `_raw_record` (for FileLog) that are filled when ClickHouse fails to parse a new record. The behaviour is controlled by the storage settings `nats_handle_error_mode` for NATS, `rabbitmq_handle_error_mode` for RabbitMQ, and `handle_error_mode` for FileLog, similar to `kafka_handle_error_mode`. If it's set to `default`, an exception will be thrown when ClickHouse fails to parse a record; if it's set to `stream`, the error and raw record will be saved into the virtual columns. Closes [#36035](https://github.com/ClickHouse/ClickHouse/issues/36035). [#55477](https://github.com/ClickHouse/ClickHouse/pull/55477) ([Kruglov Pavel](https://github.com/Avogar)).
* Keeper client improvement: add the `get_all_children_number` command, which returns the number of all child nodes under a specific path. [#55485](https://github.com/ClickHouse/ClickHouse/pull/55485) ([guoxiaolong](https://github.com/guoxiaolongzte)).
* Keeper client improvement: add the `get_direct_children_number` command, which returns the number of direct child nodes under a path. [#55898](https://github.com/ClickHouse/ClickHouse/pull/55898) ([xuzifu666](https://github.com/xuzifu666)).
* Add statement `SHOW SETTING setting_name` which is a simpler version of existing statement `SHOW SETTINGS`. [#55979](https://github.com/ClickHouse/ClickHouse/pull/55979) ([Maksim Kita](https://github.com/kitaisreal)).
* Added fields `substreams` and `filenames` to the `system.parts_columns` table. [#55108](https://github.com/ClickHouse/ClickHouse/pull/55108) ([Anton Popov](https://github.com/CurtizJ)).
* Add support for `SHOW MERGES` query. [#55815](https://github.com/ClickHouse/ClickHouse/pull/55815) ([megao](https://github.com/jetgm)).
* Introduce a setting `create_table_empty_primary_key_by_default` for default `ORDER BY ()`. [#55899](https://github.com/ClickHouse/ClickHouse/pull/55899) ([Srikanth Chekuri](https://github.com/srikanthccv)).
#### Performance Improvement
* Add option `query_plan_preserve_num_streams_after_window_functions` to preserve the number of streams after evaluating window functions to allow parallel stream processing. [#50771](https://github.com/ClickHouse/ClickHouse/pull/50771) ([frinkr](https://github.com/frinkr)).
* Release more streams if data is small. [#53867](https://github.com/ClickHouse/ClickHouse/pull/53867) ([Jiebin Sun](https://github.com/jiebinn)).
* RoaringBitmaps are now optimized before serialization. [#55044](https://github.com/ClickHouse/ClickHouse/pull/55044) ([UnamedRus](https://github.com/UnamedRus)).
* Posting lists in inverted indexes are now optimized to use the smallest possible representation for internal bitmaps. Depending on the repetitiveness of the data, this may significantly reduce the space consumption of inverted indexes. [#55069](https://github.com/ClickHouse/ClickHouse/pull/55069) ([Harry Lee](https://github.com/HarryLeeIBM)).
* Fix contention on Context lock, this significantly improves performance for a lot of short-running concurrent queries. [#55121](https://github.com/ClickHouse/ClickHouse/pull/55121) ([Maksim Kita](https://github.com/kitaisreal)).
* Improved the performance of inverted index creation by 30%. This was achieved by replacing `std::unordered_map` with `absl::flat_hash_map`. [#55210](https://github.com/ClickHouse/ClickHouse/pull/55210) ([Harry Lee](https://github.com/HarryLeeIBM)).
* Support ORC filter push down (rowgroup level). [#55330](https://github.com/ClickHouse/ClickHouse/pull/55330) ([李扬](https://github.com/taiyang-li)).
* Improve performance of external aggregation with a lot of temporary files. [#55489](https://github.com/ClickHouse/ClickHouse/pull/55489) ([Maksim Kita](https://github.com/kitaisreal)).
* Set a reasonable size for the marks cache for secondary indices by default to avoid loading the marks over and over again. [#55654](https://github.com/ClickHouse/ClickHouse/pull/55654) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Avoid unnecessary reconstruction of index granules when reading skip indexes. This addresses [#55653](https://github.com/ClickHouse/ClickHouse/issues/55653#issuecomment-1763766009). [#55683](https://github.com/ClickHouse/ClickHouse/pull/55683) ([Amos Bird](https://github.com/amosbird)).
* Cache CAST function in set during execution to improve the performance of function `IN` when set element type doesn't exactly match column type. [#55712](https://github.com/ClickHouse/ClickHouse/pull/55712) ([Duc Canh Le](https://github.com/canhld94)).
* Performance improvement for `ColumnVector::insertMany` and `ColumnVector::insertManyFrom`. [#55714](https://github.com/ClickHouse/ClickHouse/pull/55714) ([frinkr](https://github.com/frinkr)).
* Optimized Map subscript operations by predicting the next row's key position and reducing the number of comparisons. [#55929](https://github.com/ClickHouse/ClickHouse/pull/55929) ([lgbo](https://github.com/lgbo-ustc)).
* Support struct fields pruning in Parquet (in previous versions it didn't work in some cases). [#56117](https://github.com/ClickHouse/ClickHouse/pull/56117) ([lgbo](https://github.com/lgbo-ustc)).
* Add the ability to tune the number of parallel replicas used in a query execution based on the estimation of rows to read. [#51692](https://github.com/ClickHouse/ClickHouse/pull/51692) ([Raúl Marín](https://github.com/Algunenano)).
* Optimized external aggregation memory consumption in case many temporary files were generated. [#54798](https://github.com/ClickHouse/ClickHouse/pull/54798) ([Nikita Taranov](https://github.com/nickitat)).
* Distributed queries executed in `async_socket_for_remote` mode (default) now respect `max_threads` limit. Previously, some queries could create excessive threads (up to `max_distributed_connections`), causing server performance issues. [#53504](https://github.com/ClickHouse/ClickHouse/pull/53504) ([filimonov](https://github.com/filimonov)).
* Cache skippable entries while executing DDL from the ZooKeeper distributed DDL queue. [#54828](https://github.com/ClickHouse/ClickHouse/pull/54828) ([Duc Canh Le](https://github.com/canhld94)).
* Experimental inverted indexes do not store tokens with too many matches (i.e. row ids in the posting list). This saves space and avoids ineffective index lookups when sequential scans would be equally fast or faster. The previous heuristic (the `density` parameter passed to the index definition) that controlled when tokens would not be stored was too confusing for users. A much simpler heuristic based on the parameter `max_rows_per_postings_list` (default: 64k) is introduced; it directly controls the maximum allowed number of row ids in a postings list. [#55616](https://github.com/ClickHouse/ClickHouse/pull/55616) ([Harry Lee](https://github.com/HarryLeeIBM)).
* Improve write performance to `EmbeddedRocksDB` tables. [#55732](https://github.com/ClickHouse/ClickHouse/pull/55732) ([Duc Canh Le](https://github.com/canhld94)).
* Improved overall resilience for ClickHouse in case of many parts within a partition (more than 1000). It might reduce the number of `TOO_MANY_PARTS` errors. [#55526](https://github.com/ClickHouse/ClickHouse/pull/55526) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* Reduced memory consumption during loading of hierarchical dictionaries. [#55838](https://github.com/ClickHouse/ClickHouse/pull/55838) ([Nikita Taranov](https://github.com/nickitat)).
* All dictionaries support setting `dictionary_use_async_executor`. [#55839](https://github.com/ClickHouse/ClickHouse/pull/55839) ([vdimir](https://github.com/vdimir)).
* Prevent excessive memory usage when deserializing AggregateFunctionTopKGenericData. [#55947](https://github.com/ClickHouse/ClickHouse/pull/55947) ([Raúl Marín](https://github.com/Algunenano)).
* On a Keeper with lots of watches, AsyncMetrics threads could consume 100% of CPU for a noticeable time in `DB::KeeperStorage::getSessionsWithWatchesCount()`. The fix is to avoid traversing the heavy `watches` and `list_watches` sets. [#56054](https://github.com/ClickHouse/ClickHouse/pull/56054) ([Alexander Gololobov](https://github.com/davenger)).
* Add setting `optimize_trivial_approximate_count_query` to use `count()` approximation for storage EmbeddedRocksDB. Enable trivial count for StorageJoin. [#55806](https://github.com/ClickHouse/ClickHouse/pull/55806) ([Duc Canh Le](https://github.com/canhld94)).
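As a small illustration of the `optimize_trivial_approximate_count_query` entry above, a query-level sketch (the table name is a placeholder):

```sql
-- Hypothetical EmbeddedRocksDB table; with the setting enabled, count() may
-- return an approximate value instead of scanning all keys.
SELECT count()
FROM rocksdb_events
SETTINGS optimize_trivial_approximate_count_query = 1;
```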
#### Improvement
* Functions `toDayOfWeek()` (MySQL alias: `DAYOFWEEK()`), `toYearWeek()` (`YEARWEEK()`) and `toWeek()` (`WEEK()`) now support `String` arguments. This makes their behavior consistent with MySQL's behavior. [#55589](https://github.com/ClickHouse/ClickHouse/pull/55589) ([Robert Schulze](https://github.com/rschu1ze)).
* Introduced setting `date_time_overflow_behavior` with possible values `ignore`, `throw`, `saturate` that controls the overflow behavior when converting from Date, Date32, DateTime64, Integer or Float to Date, Date32, DateTime or DateTime64. [#55696](https://github.com/ClickHouse/ClickHouse/pull/55696) ([Andrey Zvonov](https://github.com/zvonand)).
* Implement query parameters support for `ALTER TABLE ... ACTION PARTITION [ID] {parameter_name:ParameterType}`. Merges [#49516](https://github.com/ClickHouse/ClickHouse/issues/49516). Closes [#49449](https://github.com/ClickHouse/ClickHouse/issues/49449). [#55604](https://github.com/ClickHouse/ClickHouse/pull/55604) ([alesapin](https://github.com/alesapin)).
* Print processor ids in a prettier manner in EXPLAIN. [#48852](https://github.com/ClickHouse/ClickHouse/pull/48852) ([Vlad Seliverstov](https://github.com/behebot)).
* Creating a direct dictionary with a lifetime field will be rejected at create time (as the lifetime does not make sense for direct dictionaries). Fixes: [#27861](https://github.com/ClickHouse/ClickHouse/issues/27861). [#49043](https://github.com/ClickHouse/ClickHouse/pull/49043) ([Rory Crispin](https://github.com/RoryCrispin)).
* Allow parameters in queries with partitions like `ALTER TABLE t DROP PARTITION`. Closes [#49449](https://github.com/ClickHouse/ClickHouse/issues/49449). [#49516](https://github.com/ClickHouse/ClickHouse/pull/49516) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add a new column `xid` for `system.zookeeper_connection`. [#50702](https://github.com/ClickHouse/ClickHouse/pull/50702) ([helifu](https://github.com/helifu)).
* Display the correct server settings in `system.server_settings` after configuration reload. [#53774](https://github.com/ClickHouse/ClickHouse/pull/53774) ([helifu](https://github.com/helifu)).
* Add support for the mathematical minus character `−` in queries, similar to `-`. [#54100](https://github.com/ClickHouse/ClickHouse/pull/54100) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Add replica groups to the experimental `Replicated` database engine. Closes [#53620](https://github.com/ClickHouse/ClickHouse/issues/53620). [#54421](https://github.com/ClickHouse/ClickHouse/pull/54421) ([Nikolay Degterinsky](https://github.com/evillique)).
* It is better to retry retriable S3 errors than to fail the whole query. The default value of `s3_retry_attempts` was increased. [#54770](https://github.com/ClickHouse/ClickHouse/pull/54770) ([Sema Checherinda](https://github.com/CheSema)).
* Add load balancing mode `hostname_levenshtein_distance`. [#54826](https://github.com/ClickHouse/ClickHouse/pull/54826) ([JackyWoo](https://github.com/JackyWoo)).
* Improve hiding secrets in logs. [#55089](https://github.com/ClickHouse/ClickHouse/pull/55089) ([Vitaly Baranov](https://github.com/vitlibar)).
* From now on, projection analysis is performed only on top of the query plan. The setting `query_plan_optimize_projection` became obsolete (it had been enabled by default for a long time). [#55112](https://github.com/ClickHouse/ClickHouse/pull/55112) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* When function `untuple` is now called on a tuple with named elements and itself has an alias (e.g. `select untuple(tuple(1)::Tuple(element_alias Int)) AS untuple_alias`), then the result column name is now generated from the untuple alias and the tuple element alias (in the example: "untuple_alias.element_alias"). [#55123](https://github.com/ClickHouse/ClickHouse/pull/55123) ([garcher22](https://github.com/garcher22)).
* Added setting `describe_include_virtual_columns`, which allows including virtual columns of a table in the result of a `DESCRIBE` query. Added setting `describe_compact_output`: if it is set to `true`, the `DESCRIBE` query returns only names and types of columns without extra information. [#55129](https://github.com/ClickHouse/ClickHouse/pull/55129) ([Anton Popov](https://github.com/CurtizJ)).
* Sometimes `OPTIMIZE` with `optimize_throw_if_noop=1` could fail with an error `unknown reason`, while the real cause was different projections in different parts. This behavior is fixed. [#55130](https://github.com/ClickHouse/ClickHouse/pull/55130) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* Allow to have several `MaterializedPostgreSQL` tables following the same Postgres table. By default this behaviour is not enabled (for compatibility, because it is a backward-incompatible change), but can be turned on with setting `materialized_postgresql_use_unique_replication_consumer_identifier`. Closes [#54918](https://github.com/ClickHouse/ClickHouse/issues/54918). [#55145](https://github.com/ClickHouse/ClickHouse/pull/55145) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Allow to parse negative `DateTime64` and `DateTime` with fractional part from short strings. [#55146](https://github.com/ClickHouse/ClickHouse/pull/55146) ([Andrey Zvonov](https://github.com/zvonand)).
* To improve compatibility with MySQL, 1. `information_schema.tables` now includes the new field `table_rows`, and 2. `information_schema.columns` now includes the new field `extra`. [#55215](https://github.com/ClickHouse/ClickHouse/pull/55215) ([Robert Schulze](https://github.com/rschu1ze)).
* `clickhouse-client` no longer shows "0 rows in set" when the result is empty and an exception was thrown. [#55240](https://github.com/ClickHouse/ClickHouse/pull/55240) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Support renaming a table without the keyword `TABLE`, e.g. `RENAME db.t1 TO db.t2`. [#55373](https://github.com/ClickHouse/ClickHouse/pull/55373) ([凌涛](https://github.com/lingtaolf)).
* Add `internal_replication` to `system.clusters`. [#55377](https://github.com/ClickHouse/ClickHouse/pull/55377) ([Konstantin Morozov](https://github.com/k-morozov)).
* Select remote proxy resolver based on request protocol, add proxy feature docs and remove `DB::ProxyConfiguration::Protocol::ANY`. [#55430](https://github.com/ClickHouse/ClickHouse/pull/55430) ([Arthur Passos](https://github.com/arthurpassos)).
* Avoid retrying keeper operations on INSERT after table shutdown. [#55519](https://github.com/ClickHouse/ClickHouse/pull/55519) ([Azat Khuzhin](https://github.com/azat)).
* `SHOW COLUMNS` now correctly reports type `FixedString` as `BLOB` if setting `use_mysql_types_in_show_columns` is on. Also added two new settings, `mysql_map_string_to_text_in_show_columns` and `mysql_map_fixed_string_to_text_in_show_columns` to switch the output for types `String` and `FixedString` as `TEXT` or `BLOB`. [#55617](https://github.com/ClickHouse/ClickHouse/pull/55617) ([Serge Klochkov](https://github.com/slvrtrn)).
* During startup of `ReplicatedMergeTree` tables, the ClickHouse server checks the set of parts for unexpected parts (which exist locally but not in ZooKeeper). All unexpected parts are moved to the detached directory, and the server tries to restore some ancestor (covered) parts instead of them. Now the server tries to restore the closest ancestors instead of random covered parts. [#55645](https://github.com/ClickHouse/ClickHouse/pull/55645) ([alesapin](https://github.com/alesapin)).
* The advanced dashboard now supports draggable charts on touch devices. This closes [#54206](https://github.com/ClickHouse/ClickHouse/issues/54206). [#55649](https://github.com/ClickHouse/ClickHouse/pull/55649) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Use the default query format if declared when outputting exception with `http_write_exception_in_output_format`. [#55739](https://github.com/ClickHouse/ClickHouse/pull/55739) ([Raúl Marín](https://github.com/Algunenano)).
* Provide a better message for common MATERIALIZED VIEW pitfalls. [#55826](https://github.com/ClickHouse/ClickHouse/pull/55826) ([Raúl Marín](https://github.com/Algunenano)).
* If you dropped the current database, you will still be able to run some queries in `clickhouse-local` and switch to another database. This makes the behavior consistent with `clickhouse-client`. This closes [#55834](https://github.com/ClickHouse/ClickHouse/issues/55834). [#55853](https://github.com/ClickHouse/ClickHouse/pull/55853) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Functions `(add|subtract)(Year|Quarter|Month|Week|Day|Hour|Minute|Second|Millisecond|Microsecond|Nanosecond)` now support string-encoded date arguments, e.g. `SELECT addDays('2023-10-22', 1)`. This increases compatibility with MySQL and is needed by Tableau Online. [#55869](https://github.com/ClickHouse/ClickHouse/pull/55869) ([Robert Schulze](https://github.com/rschu1ze)).
* When disabled, the setting `apply_deleted_mask` allows reading rows that were marked as deleted by lightweight DELETE queries. This is useful for debugging (see the sketch at the end of this section). [#55952](https://github.com/ClickHouse/ClickHouse/pull/55952) ([Alexander Gololobov](https://github.com/davenger)).
* Allow skipping `null` values when serializing Tuple to JSON objects, which makes it possible to keep compatibility with Spark's `to_json` function; this is also useful for Gluten. [#55956](https://github.com/ClickHouse/ClickHouse/pull/55956) ([李扬](https://github.com/taiyang-li)).
* Functions `(add|sub)Date()` now support string-encoded date arguments, e.g. `SELECT addDate('2023-10-22 11:12:13', INTERVAL 5 MINUTE)`. The same support for string-encoded date arguments is added to the plus and minus operators, e.g. `SELECT '2023-10-23' + INTERVAL 1 DAY`. This increases compatibility with MySQL and is needed by Tableau Online. [#55960](https://github.com/ClickHouse/ClickHouse/pull/55960) ([Robert Schulze](https://github.com/rschu1ze)).
* Allow unquoted strings with CR (`\r`) in CSV format. Closes [#39930](https://github.com/ClickHouse/ClickHouse/issues/39930). [#56046](https://github.com/ClickHouse/ClickHouse/pull/56046) ([Kruglov Pavel](https://github.com/Avogar)).
* Allow to run `clickhouse-keeper` using embedded config. [#56086](https://github.com/ClickHouse/ClickHouse/pull/56086) ([Maksim Kita](https://github.com/kitaisreal)).
* Limit the maximum configuration value of `queued.min.messages` to avoid a problem with starting to fetch data from Kafka. [#56121](https://github.com/ClickHouse/ClickHouse/pull/56121) ([Stas Morozov](https://github.com/r3b-fish)).
* Fixed a typo in SQL function `minSampleSizeContinous` (renamed `minSampleSizeContinuous`). Old name is preserved for backward compatibility. This closes: [#56139](https://github.com/ClickHouse/ClickHouse/issues/56139). [#56143](https://github.com/ClickHouse/ClickHouse/pull/56143) ([Dorota Szeremeta](https://github.com/orotaday)).
* Print the paths of broken parts on disk before shutting down the server. Before this change, if a part was corrupted on disk and the server could not start, it was almost impossible to understand which part was broken. This is fixed. [#56181](https://github.com/ClickHouse/ClickHouse/pull/56181) ([Duc Canh Le](https://github.com/canhld94)).
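A minimal sketch for the `apply_deleted_mask` entry above; the table and column names are assumptions made only for illustration:

```sql
-- Lightweight DELETE marks rows as deleted instead of rewriting parts.
DELETE FROM orders WHERE status = 'cancelled';

-- Normal reads skip the deleted rows.
SELECT count() FROM orders;

-- For debugging, read the data including rows marked as deleted.
SELECT count() FROM orders SETTINGS apply_deleted_mask = 0;
```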
#### Build/Testing/Packaging Improvement
* If the database in Docker is already initialized, it doesn't need to be initialized again upon subsequent launches. This can potentially fix the issue of infinite container restarts when the database fails to load within 1000 attempts (relevant for very large databases and multi-node setups). [#50724](https://github.com/ClickHouse/ClickHouse/pull/50724) ([Alexander Nikolaev](https://github.com/AlexNik)).
* A source-code resource (including submodules) is now built in the special Darwin build task. It may be used to build ClickHouse without checking out the submodules. [#51435](https://github.com/ClickHouse/ClickHouse/pull/51435) ([Ilya Yatsishin](https://github.com/qoega)).
* An error was occurring when building ClickHouse with the AVX series of instructions enabled globally (which isn't recommended). The reason is that snappy does not enable `SNAPPY_HAVE_X86_CRC32`. [#55049](https://github.com/ClickHouse/ClickHouse/pull/55049) ([monchickey](https://github.com/monchickey)).
* Solve issue with launching standalone `clickhouse-keeper` from `clickhouse-server` package. [#55226](https://github.com/ClickHouse/ClickHouse/pull/55226) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* In the tests, RabbitMQ version is updated to 3.12.6. Improved logs collection for RabbitMQ tests. [#55424](https://github.com/ClickHouse/ClickHouse/pull/55424) ([Ilya Yatsishin](https://github.com/qoega)).
* Modified the error message difference between openssl and boringssl to fix the functional test. [#55975](https://github.com/ClickHouse/ClickHouse/pull/55975) ([MeenaRenganathan22](https://github.com/MeenaRenganathan22)).
* Use the upstream repository for Apache DataSketches. [#55787](https://github.com/ClickHouse/ClickHouse/pull/55787) ([Nikita Taranov](https://github.com/nickitat)).
#### Bug Fix (user-visible misbehavior in an official stable release)
* Skip hardlinking inverted index files in mutation [#47663](https://github.com/ClickHouse/ClickHouse/pull/47663) ([cangyin](https://github.com/cangyin)).
* Fixed a bug where the `match` function (regex) with a pattern containing alternation produced an incorrect key condition. Closes [#53222](https://github.com/ClickHouse/ClickHouse/issues/53222). [#54696](https://github.com/ClickHouse/ClickHouse/pull/54696) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix 'Cannot find column' in read-in-order optimization with ARRAY JOIN [#51746](https://github.com/ClickHouse/ClickHouse/pull/51746) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Support missed experimental `Object(Nullable(json))` subcolumns in query. [#54052](https://github.com/ClickHouse/ClickHouse/pull/54052) ([zps](https://github.com/VanDarkholme7)).
* Re-add fix for `accurateCastOrNull()` [#54629](https://github.com/ClickHouse/ClickHouse/pull/54629) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Fix detecting `DEFAULT` for columns of a Distributed table created without AS [#55060](https://github.com/ClickHouse/ClickHouse/pull/55060) ([Vitaly Baranov](https://github.com/vitlibar)).
* Proper cleanup in case of exception in ctor of ShellCommandSource [#55103](https://github.com/ClickHouse/ClickHouse/pull/55103) ([Alexander Gololobov](https://github.com/davenger)).
* Fix deadlock in LDAP assigned role update [#55119](https://github.com/ClickHouse/ClickHouse/pull/55119) ([Julian Maicher](https://github.com/jmaicher)).
* Suppress error statistics update for internal exceptions [#55128](https://github.com/ClickHouse/ClickHouse/pull/55128) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix deadlock in backups [#55132](https://github.com/ClickHouse/ClickHouse/pull/55132) ([alesapin](https://github.com/alesapin)).
* Fix storage Iceberg files retrieval [#55144](https://github.com/ClickHouse/ClickHouse/pull/55144) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix partition pruning of extra columns in set. [#55172](https://github.com/ClickHouse/ClickHouse/pull/55172) ([Amos Bird](https://github.com/amosbird)).
* Fix recalculation of skip indexes in ALTER UPDATE queries when table has adaptive granularity [#55202](https://github.com/ClickHouse/ClickHouse/pull/55202) ([Duc Canh Le](https://github.com/canhld94)).
* Fix for background download in fs cache [#55252](https://github.com/ClickHouse/ClickHouse/pull/55252) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Avoid possible memory leaks in compressors in case of missing buffer finalization [#55262](https://github.com/ClickHouse/ClickHouse/pull/55262) ([Azat Khuzhin](https://github.com/azat)).
* Fix functions execution over sparse columns [#55275](https://github.com/ClickHouse/ClickHouse/pull/55275) ([Azat Khuzhin](https://github.com/azat)).
* Fix incorrect merging of Nested for SELECT FINAL FROM SummingMergeTree [#55276](https://github.com/ClickHouse/ClickHouse/pull/55276) ([Azat Khuzhin](https://github.com/azat)).
* Fix bug with inability to drop detached partition in replicated merge tree on top of S3 without zero copy [#55309](https://github.com/ClickHouse/ClickHouse/pull/55309) ([alesapin](https://github.com/alesapin)).
* Fix a crash in MergeSortingPartialResultTransform (due to zero chunks after `remerge`) [#55335](https://github.com/ClickHouse/ClickHouse/pull/55335) ([Azat Khuzhin](https://github.com/azat)).
* Fix data-race in CreatingSetsTransform (on errors) due to throwing shared exception [#55338](https://github.com/ClickHouse/ClickHouse/pull/55338) ([Azat Khuzhin](https://github.com/azat)).
* Fix trash optimization (up to a certain extent) [#55353](https://github.com/ClickHouse/ClickHouse/pull/55353) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix leak in StorageHDFS [#55370](https://github.com/ClickHouse/ClickHouse/pull/55370) ([Azat Khuzhin](https://github.com/azat)).
* Fix parsing of arrays in cast operator [#55417](https://github.com/ClickHouse/ClickHouse/pull/55417) ([Anton Popov](https://github.com/CurtizJ)).
* Fix filtering by virtual columns with OR filter in query [#55418](https://github.com/ClickHouse/ClickHouse/pull/55418) ([Azat Khuzhin](https://github.com/azat)).
* Fix MongoDB connection issues [#55419](https://github.com/ClickHouse/ClickHouse/pull/55419) ([Nikolay Degterinsky](https://github.com/evillique)).
* Fix MySQL interface boolean representation [#55427](https://github.com/ClickHouse/ClickHouse/pull/55427) ([Serge Klochkov](https://github.com/slvrtrn)).
* Fix MySQL text protocol DateTime formatting and LowCardinality(Nullable(T)) types reporting [#55479](https://github.com/ClickHouse/ClickHouse/pull/55479) ([Serge Klochkov](https://github.com/slvrtrn)).
* Make `use_mysql_types_in_show_columns` affect only `SHOW COLUMNS` [#55481](https://github.com/ClickHouse/ClickHouse/pull/55481) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix stack symbolizer parsing `DW_FORM_ref_addr` incorrectly and sometimes crashing [#55483](https://github.com/ClickHouse/ClickHouse/pull/55483) ([Michael Kolupaev](https://github.com/al13n321)).
* Destroy fiber in case of exception in cancelBefore in AsyncTaskExecutor [#55516](https://github.com/ClickHouse/ClickHouse/pull/55516) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix Query Parameters not working with custom HTTP handlers [#55521](https://github.com/ClickHouse/ClickHouse/pull/55521) ([Konstantin Bogdanov](https://github.com/thevar1able)).
* Fix checking of non handled data for Values format [#55527](https://github.com/ClickHouse/ClickHouse/pull/55527) ([Azat Khuzhin](https://github.com/azat)).
* Fix 'Invalid cursor state' in odbc interacting with MS SQL Server [#55558](https://github.com/ClickHouse/ClickHouse/pull/55558) ([vdimir](https://github.com/vdimir)).
* Fix max execution time and 'break' overflow mode [#55577](https://github.com/ClickHouse/ClickHouse/pull/55577) ([Alexander Gololobov](https://github.com/davenger)).
* Fix crash in QueryNormalizer with cyclic aliases [#55602](https://github.com/ClickHouse/ClickHouse/pull/55602) ([vdimir](https://github.com/vdimir)).
* Disable wrong optimization and add a test [#55609](https://github.com/ClickHouse/ClickHouse/pull/55609) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Merging [#52352](https://github.com/ClickHouse/ClickHouse/issues/52352) [#55621](https://github.com/ClickHouse/ClickHouse/pull/55621) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Add a test to avoid incorrect decimal sorting [#55662](https://github.com/ClickHouse/ClickHouse/pull/55662) ([Amos Bird](https://github.com/amosbird)).
* Fix progress bar for s3 and azure Cluster functions with url without globs [#55666](https://github.com/ClickHouse/ClickHouse/pull/55666) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix filtering by virtual columns with OR filter in query (resubmit) [#55678](https://github.com/ClickHouse/ClickHouse/pull/55678) ([Azat Khuzhin](https://github.com/azat)).
* Fixes and improvements for Iceberg storage [#55695](https://github.com/ClickHouse/ClickHouse/pull/55695) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix data race in CreatingSetsTransform (v2) [#55786](https://github.com/ClickHouse/ClickHouse/pull/55786) ([Azat Khuzhin](https://github.com/azat)).
* Throw exception when parsing illegal string as float if precise_float_parsing is true [#55861](https://github.com/ClickHouse/ClickHouse/pull/55861) ([李扬](https://github.com/taiyang-li)).
* Disable predicate pushdown if the CTE contains stateful functions [#55871](https://github.com/ClickHouse/ClickHouse/pull/55871) ([Raúl Marín](https://github.com/Algunenano)).
* Fix normalize ASTSelectWithUnionQuery, as it was stripping `FORMAT` from the query [#55887](https://github.com/ClickHouse/ClickHouse/pull/55887) ([flynn](https://github.com/ucasfl)).
* Try to fix possible segfault in Native ORC input format [#55891](https://github.com/ClickHouse/ClickHouse/pull/55891) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix window functions in case of sparse columns. [#55895](https://github.com/ClickHouse/ClickHouse/pull/55895) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* fix: StorageNull supports subcolumns [#55912](https://github.com/ClickHouse/ClickHouse/pull/55912) ([FFish](https://github.com/wxybear)).
* Do not write retriable errors for Replicated mutate/merge into error log [#55944](https://github.com/ClickHouse/ClickHouse/pull/55944) ([Azat Khuzhin](https://github.com/azat)).
* Fix `SHOW DATABASES LIMIT <N>` [#55962](https://github.com/ClickHouse/ClickHouse/pull/55962) ([Raúl Marín](https://github.com/Algunenano)).
* Fix autogenerated Protobuf schema with fields with underscore [#55974](https://github.com/ClickHouse/ClickHouse/pull/55974) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix dateTime64ToSnowflake64() with non-default scale [#55983](https://github.com/ClickHouse/ClickHouse/pull/55983) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix output/input of Arrow dictionary column [#55989](https://github.com/ClickHouse/ClickHouse/pull/55989) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix fetching schema from schema registry in AvroConfluent [#55991](https://github.com/ClickHouse/ClickHouse/pull/55991) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix 'Block structure mismatch' on concurrent ALTER and INSERTs in Buffer table [#55995](https://github.com/ClickHouse/ClickHouse/pull/55995) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix incorrect free space accounting for least_used JBOD policy [#56030](https://github.com/ClickHouse/ClickHouse/pull/56030) ([Azat Khuzhin](https://github.com/azat)).
* Fix missing scalar issue when evaluating subqueries inside table functions [#56057](https://github.com/ClickHouse/ClickHouse/pull/56057) ([Amos Bird](https://github.com/amosbird)).
* Fix wrong query result when http_write_exception_in_output_format=1 [#56135](https://github.com/ClickHouse/ClickHouse/pull/56135) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix schema cache for fallback JSON->JSONEachRow with changed settings [#56172](https://github.com/ClickHouse/ClickHouse/pull/56172) ([Kruglov Pavel](https://github.com/Avogar)).
* Add error handler to odbc-bridge [#56185](https://github.com/ClickHouse/ClickHouse/pull/56185) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
### ClickHouse release 23.9, 2023-09-28
#### Backward Incompatible Change
* Remove the `status_info` configuration option and dictionaries status from the default Prometheus handler. [#54090](https://github.com/ClickHouse/ClickHouse/pull/54090) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* The experimental parts metadata cache is removed from the codebase. [#54215](https://github.com/ClickHouse/ClickHouse/pull/54215) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Disable the setting `input_format_json_try_infer_numbers_from_strings` by default, so numbers are no longer inferred from strings in JSON formats; this avoids possible parsing errors when sample data contains strings that look like numbers. [#55099](https://github.com/ClickHouse/ClickHouse/pull/55099) ([Kruglov Pavel](https://github.com/Avogar)).
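A short sketch of the schema-inference change above, assuming the `format` table function is used for inspection (the sample data is made up):

```sql
-- By default, the string "123" is now inferred as String, not as a number.
DESC format(JSONEachRow, '{"value" : "123"}');

-- The previous behaviour can be restored per query.
DESC format(JSONEachRow, '{"value" : "123"}')
SETTINGS input_format_json_try_infer_numbers_from_strings = 1;
```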
#### New Feature
* Improve schema inference from JSON formats: 1) Now it's possible to infer named Tuples from JSON objects without the experimental JSON type under a setting `input_format_json_try_infer_named_tuples_from_objects` in JSON formats. Previously, without the experimental type JSON we could only infer JSON objects as Strings or Maps; now we can infer named Tuples. The resulting Tuple type will contain all keys of objects that were read in the data sample during schema inference. It can be useful for reading structured JSON data without sparse objects. The setting is enabled by default. 2) Allow parsing a JSON array into a column with type String under the setting `input_format_json_read_arrays_as_strings`. It can help with reading arrays with values of different types. 3) Allow using type String for JSON keys with unknown types (`null`/`[]`/`{}`) in sample data under the setting `input_format_json_infer_incomplete_types_as_strings`. Now in JSON formats we can read any value into a String column and we can avoid getting the error `Cannot determine type for column 'column_name' by first 25000 rows of data, most likely this column contains only Nulls or empty Arrays/Maps` during schema inference by using type String for unknown types, so the data will be read successfully. [#54427](https://github.com/ClickHouse/ClickHouse/pull/54427) ([Kruglov Pavel](https://github.com/Avogar)).
* Added IO scheduling support for remote disks. Storage configuration for disk types `s3`, `s3_plain`, `hdfs` and `azure_blob_storage` can now contain `read_resource` and `write_resource` elements holding resource names. Scheduling policies for these resources can be configured in a separate server configuration section `resources`. Queries can be marked using setting `workload` and classified using server configuration section `workload_classifiers` to achieve diverse resource scheduling goals. More details in [the docs](https://clickhouse.com/docs/en/operations/workload-scheduling). [#47009](https://github.com/ClickHouse/ClickHouse/pull/47009) ([Sergei Trifonov](https://github.com/serxa)). Added "bandwidth_limit" IO scheduling node type. It allows you to specify `max_speed` and `max_burst` constraints on traffic passing though this node. [#54618](https://github.com/ClickHouse/ClickHouse/pull/54618) ([Sergei Trifonov](https://github.com/serxa)).
* Added new type of authentication based on SSH keys. It works only for the native TCP protocol. [#41109](https://github.com/ClickHouse/ClickHouse/pull/41109) ([George Gamezardashvili](https://github.com/InfJoker)).
* Added a new column `_block_number` for MergeTree tables. [#44532](https://github.com/ClickHouse/ClickHouse/issues/44532). [#47532](https://github.com/ClickHouse/ClickHouse/pull/47532) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Add `IF EMPTY` clause for `DROP TABLE` queries. [#48915](https://github.com/ClickHouse/ClickHouse/pull/48915) ([Pavel Novitskiy](https://github.com/pnovitskiy)).
* SQL functions `toString(datetime, timezone)` and `formatDateTime(datetime, format, timezone)` now support non-constant timezone arguments. [#53680](https://github.com/ClickHouse/ClickHouse/pull/53680) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add support for `ALTER TABLE MODIFY COMMENT`. Note: something similar was added by an external contributor a long time ago, but the feature did not work at all and only confused users. This closes [#36377](https://github.com/ClickHouse/ClickHouse/issues/36377). [#51304](https://github.com/ClickHouse/ClickHouse/pull/51304) ([Alexey Milovidov](https://github.com/alexey-milovidov)). Note: this command does not propagate between replicas, so the replicas of a table could have different comments.
* Added `GCD` a.k.a. "greatest common divisor" as a new data compression codec. The codec computes the GCD of all column values, and then divides each value by the GCD. The GCD codec is a data preparation codec (similar to Delta and DoubleDelta) and cannot be used stand-alone. It works with integer, decimal and date/time data types. A viable use case for the GCD codec is column values that change (increase/decrease) in multiples of the GCD, e.g. 24 - 28 - 16 - 24 - 8 - 24 (assuming GCD = 4); a minimal sketch is given at the end of this section. [#53149](https://github.com/ClickHouse/ClickHouse/pull/53149) ([Alexander Nam](https://github.com/seshWCS)).
* Two new type aliases `DECIMAL(P)` (as a shortcut for `DECIMAL(P, 0)`) and `DECIMAL` (as a shortcut for `DECIMAL(10, 0)`) were added. This makes ClickHouse more compatible with MySQL's SQL dialect. [#53328](https://github.com/ClickHouse/ClickHouse/pull/53328) ([Val Doroshchuk](https://github.com/valbok)).
* Added a new system log table `backup_log` to track all `BACKUP` and `RESTORE` operations. [#53638](https://github.com/ClickHouse/ClickHouse/pull/53638) ([Victor Krasnov](https://github.com/sirvickr)).
* Added a format setting `output_format_markdown_escape_special_characters` (default: false). The setting controls whether special characters like `!`, `#`, `$` etc. are escaped (i.e. prefixed by a backslash) in the `Markdown` output format. [#53860](https://github.com/ClickHouse/ClickHouse/pull/53860) ([irenjj](https://github.com/irenjj)).
* Add function `decodeHTMLComponent`. [#54097](https://github.com/ClickHouse/ClickHouse/pull/54097) ([Bharat Nallan](https://github.com/bharatnc)).
* Added `peak_threads_usage` to the `query_log` table. [#54335](https://github.com/ClickHouse/ClickHouse/pull/54335) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
* Add `SHOW FUNCTIONS` support to clickhouse-client. [#54337](https://github.com/ClickHouse/ClickHouse/pull/54337) ([Julia Kartseva](https://github.com/wat-ze-hex)).
* Added function `toDaysSinceYearZero` with alias `TO_DAYS` (for compatibility with MySQL) which returns the number of days passed since `0001-01-01` (in Proleptic Gregorian Calendar). [#54479](https://github.com/ClickHouse/ClickHouse/pull/54479) ([Robert Schulze](https://github.com/rschu1ze)). Function `toDaysSinceYearZero()` now supports arguments of type `DateTime` and `DateTime64`. [#54856](https://github.com/ClickHouse/ClickHouse/pull/54856) ([Serge Klochkov](https://github.com/slvrtrn)).
* Added functions `YYYYMMDDtoDate`, `YYYYMMDDtoDate32`, `YYYYMMDDhhmmssToDateTime` and `YYYYMMDDhhmmssToDateTime64`. They convert a date or date with time encoded as integer (e.g. 20230911) into a native date or date with time. As such, they provide the opposite functionality of existing functions `YYYYMMDDToDate`, `YYYYMMDDToDateTime`, `YYYYMMDDhhmmddToDateTime`, `YYYYMMDDhhmmddToDateTime64`. [#54509](https://github.com/ClickHouse/ClickHouse/pull/54509) ([Quanfa Fu](https://github.com/dentiscalprum)) ([Robert Schulze](https://github.com/rschu1ze)).
* Add several string distance functions, including `byteHammingDistance`, `editDistance`. [#54935](https://github.com/ClickHouse/ClickHouse/pull/54935) ([flynn](https://github.com/ucasfl)).
* Allow specifying the expiration date and, optionally, the time for user credentials with `VALID UNTIL datetime` clause. [#51261](https://github.com/ClickHouse/ClickHouse/pull/51261) ([Nikolay Degterinsky](https://github.com/evillique)).
* Allow S3-style URLs for table functions `s3`, `gcs`, `oss`. URL is automatically converted to HTTP. Example: `'s3://clickhouse-public-datasets/hits.csv'` is converted to `'https://clickhouse-public-datasets.s3.amazonaws.com/hits.csv'`. [#54931](https://github.com/ClickHouse/ClickHouse/pull/54931) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add new setting `print_pretty_type_names` to print deeply nested types like Tuple/Map/Array in a prettier way. [#55095](https://github.com/ClickHouse/ClickHouse/pull/55095) ([Kruglov Pavel](https://github.com/Avogar)).
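A minimal sketch of the `GCD` codec entry above; the table and column names are placeholders. Since `GCD` is a data preparation codec, it is chained with a general-purpose codec such as `LZ4`:

```sql
-- Hypothetical table: the values change in multiples of a common divisor,
-- e.g. timestamps in milliseconds that are always whole seconds.
CREATE TABLE events
(
    id    UInt64,
    ts_ms UInt64 CODEC(GCD, LZ4)  -- divide values by their GCD, then compress
)
ENGINE = MergeTree
ORDER BY id;
```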
#### Performance Improvement
* Speed up reading from S3 by enabling prefetches by default. [#53709](https://github.com/ClickHouse/ClickHouse/pull/53709) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Do not implicitly read PK and version columns in lonely parts if unnecessary for queries with FINAL. [#53919](https://github.com/ClickHouse/ClickHouse/pull/53919) ([Duc Canh Le](https://github.com/canhld94)).
* Optimize group by constant keys. Will optimize queries with group by `_file/_path` after https://github.com/ClickHouse/ClickHouse/pull/53529. [#53549](https://github.com/ClickHouse/ClickHouse/pull/53549) ([Kruglov Pavel](https://github.com/Avogar)).
* Improve performance of sorting for `Decimal` columns. Improve performance of insertion into `MergeTree` if ORDER BY contains a `Decimal` column. Improve performance of sorting when data is already sorted or almost sorted. [#35961](https://github.com/ClickHouse/ClickHouse/pull/35961) ([Maksim Kita](https://github.com/kitaisreal)).
* Improve performance for huge query analysis. Fixes [#51224](https://github.com/ClickHouse/ClickHouse/issues/51224). [#51469](https://github.com/ClickHouse/ClickHouse/pull/51469) ([frinkr](https://github.com/frinkr)).
* An optimization to rewrite `COUNT(DISTINCT ...)` and various `uniq` variants to `count` if it is selected from a subquery with GROUP BY. [#52082](https://github.com/ClickHouse/ClickHouse/pull/52082) [#52645](https://github.com/ClickHouse/ClickHouse/pull/52645) ([JackyWoo](https://github.com/JackyWoo)).
* Remove manual calls to `mmap/mremap/munmap` and delegate all this work to `jemalloc` - and it slightly improves performance. [#52792](https://github.com/ClickHouse/ClickHouse/pull/52792) ([Nikita Taranov](https://github.com/nickitat)).
* Fixed high CPU consumption when working with NATS. [#54399](https://github.com/ClickHouse/ClickHouse/pull/54399) ([Vasilev Pyotr](https://github.com/vahpetr)).
* Since we use separate instructions for executing `toString()` with datetime argument, it is possible to improve performance a bit for non-datetime arguments and have some parts of the code cleaner. Follows up [#53680](https://github.com/ClickHouse/ClickHouse/issues/53680). [#54443](https://github.com/ClickHouse/ClickHouse/pull/54443) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Instead of serializing JSON elements into a `std::stringstream`, this PR puts the serialization result into `ColumnString` directly. [#54613](https://github.com/ClickHouse/ClickHouse/pull/54613) ([lgbo](https://github.com/lgbo-ustc)).
* Enable ORDER BY optimization for reading data in corresponding order from a MergeTree table in case that the table is behind a view. [#54628](https://github.com/ClickHouse/ClickHouse/pull/54628) ([Vitaly Baranov](https://github.com/vitlibar)).
* Improve JSON SQL functions by reusing `GeneratorJSONPath` and removing several shared pointers. [#54735](https://github.com/ClickHouse/ClickHouse/pull/54735) ([lgbo](https://github.com/lgbo-ustc)).
* Keeper tries to batch flush requests for better performance. [#53049](https://github.com/ClickHouse/ClickHouse/pull/53049) ([Antonio Andelic](https://github.com/antonio2368)).
* Now `clickhouse-client` processes files in parallel when `INFILE 'glob_expression'` is used (see the sketch at the end of this section). Closes [#54218](https://github.com/ClickHouse/ClickHouse/issues/54218). [#54533](https://github.com/ClickHouse/ClickHouse/pull/54533) ([Max K.](https://github.com/mkaynov)).
* Allow to use primary key for IN function where primary key column types are different from `IN` function right side column types. Example: `SELECT id FROM test_table WHERE id IN (SELECT '5')`. Closes [#48936](https://github.com/ClickHouse/ClickHouse/issues/48936). [#54544](https://github.com/ClickHouse/ClickHouse/pull/54544) ([Maksim Kita](https://github.com/kitaisreal)).
* Hash JOIN tries to shrink internal buffers consuming half of maximal available memory (set by `max_bytes_in_join`). [#54584](https://github.com/ClickHouse/ClickHouse/pull/54584) ([vdimir](https://github.com/vdimir)).
* Respect `max_block_size` for array join to avoid possible OOM. Close [#54290](https://github.com/ClickHouse/ClickHouse/issues/54290). [#54664](https://github.com/ClickHouse/ClickHouse/pull/54664) ([李扬](https://github.com/taiyang-li)).
* Reuse HTTP connections in the `s3` table function. [#54812](https://github.com/ClickHouse/ClickHouse/pull/54812) ([Michael Kolupaev](https://github.com/al13n321)).
* Replace the linear search in `MergeTreeRangeReader::Stream::ceilRowsToCompleteGranules` with a binary search. [#54869](https://github.com/ClickHouse/ClickHouse/pull/54869) ([usurai](https://github.com/usurai)).
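A short sketch of the parallel `INFILE` processing mentioned above, run from `clickhouse-client`; the file glob and table name are placeholders:

```sql
-- All files matching the glob are read and inserted in parallel.
INSERT INTO hits FROM INFILE 'data/part_*.csv' FORMAT CSV;
```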
#### Experimental Feature
* The creation of `Annoy` indexes can now be parallelized using setting `max_threads_for_annoy_index_creation`. [#54047](https://github.com/ClickHouse/ClickHouse/pull/54047) ([Robert Schulze](https://github.com/rschu1ze)).
* Parallel replicas over distributed don't read from all replicas [#54199](https://github.com/ClickHouse/ClickHouse/pull/54199) ([Igor Nikonov](https://github.com/devcrafter)).
#### Improvement
* Allow replacing long file names of columns in `MergeTree` data parts with hashes of the names. It helps to avoid the `File name too long` error in some cases. [#50612](https://github.com/ClickHouse/ClickHouse/pull/50612) ([Anton Popov](https://github.com/CurtizJ)).
* Parse data in `JSON` format as `JSONEachRow` if failed to parse metadata. It will allow to read files with `.json` extension even if real format is JSONEachRow. Closes [#45740](https://github.com/ClickHouse/ClickHouse/issues/45740). [#54405](https://github.com/ClickHouse/ClickHouse/pull/54405) ([Kruglov Pavel](https://github.com/Avogar)).
* Output valid JSON/XML on exception during HTTP query execution. Add setting `http_write_exception_in_output_format` to enable/disable this behaviour (enabled by default). [#52853](https://github.com/ClickHouse/ClickHouse/pull/52853) ([Kruglov Pavel](https://github.com/Avogar)).
* View `information_schema.tables` now has a new field `data_length` which shows the approximate size of the data on disk. Required to run queries generated by Amazon QuickSight. [#55037](https://github.com/ClickHouse/ClickHouse/pull/55037) ([Robert Schulze](https://github.com/rschu1ze)).
* The MySQL interface gained a minimal implementation of prepared statements, just enough to allow a connection from Tableau Online to ClickHouse via the MySQL connector. [#54115](https://github.com/ClickHouse/ClickHouse/pull/54115) ([Serge Klochkov](https://github.com/slvrtrn)). Please note: the prepared statements implementation is pretty minimal, we do not support arguments binding yet, it is not required in this particular Tableau online use case. It will be implemented as a follow-up if necessary after extensive testing of Tableau Online in case we discover issues.
* Support case-insensitive and dot-all matching modes in `regexp_tree` dictionaries. [#50906](https://github.com/ClickHouse/ClickHouse/pull/50906) ([Johann Gan](https://github.com/johanngan)).
* Keeper improvement: Add a `createIfNotExists` Keeper command. [#48855](https://github.com/ClickHouse/ClickHouse/pull/48855) ([Konstantin Bogdanov](https://github.com/thevar1able)).
* More precise integer type inference, fix [#51236](https://github.com/ClickHouse/ClickHouse/issues/51236). [#53003](https://github.com/ClickHouse/ClickHouse/pull/53003) ([Chen768959](https://github.com/Chen768959)).
* Introduced resolving of charsets in the string literals for MaterializedMySQL. [#53220](https://github.com/ClickHouse/ClickHouse/pull/53220) ([Val Doroshchuk](https://github.com/valbok)).
* Fix a subtle issue with a rarely used `EmbeddedRocksDB` table engine in an extremely rare scenario: sometimes the `EmbeddedRocksDB` table engine does not close files correctly in NFS after running `DROP TABLE`. [#53502](https://github.com/ClickHouse/ClickHouse/pull/53502) ([Mingliang Pan](https://github.com/liangliangpan)).
* `RESTORE TABLE ON CLUSTER` must create replicated tables with a matching UUID on hosts. Otherwise the macro `{uuid}` in ZooKeeper path can't work correctly after RESTORE. This PR implements that. [#53765](https://github.com/ClickHouse/ClickHouse/pull/53765) ([Vitaly Baranov](https://github.com/vitlibar)).
* Added restore setting `restore_broken_parts_as_detached`: if it's true, the RESTORE process won't stop on broken parts while restoring; instead, all the broken parts will be copied to the `detached` folder with the prefix `broken-from-backup`. If it's false, the RESTORE process will stop on the first broken part (if any). The default value is false. [#53877](https://github.com/ClickHouse/ClickHouse/pull/53877) ([Vitaly Baranov](https://github.com/vitlibar)).
* Add `elapsed_ns` field to HTTP headers X-ClickHouse-Progress and X-ClickHouse-Summary. [#54179](https://github.com/ClickHouse/ClickHouse/pull/54179) ([joelynch](https://github.com/joelynch)).
* Implementation of `reconfig` (https://github.com/ClickHouse/ClickHouse/pull/49450), `sync`, and `exists` commands for keeper-client. [#54201](https://github.com/ClickHouse/ClickHouse/pull/54201) ([pufit](https://github.com/pufit)).
* `clickhouse-local` and `clickhouse-client` now allow to specify the `--query` parameter multiple times, e.g. `./clickhouse-client --query "SELECT 1" --query "SELECT 2"`. This syntax is slightly more intuitive than `./clickhouse-client --multiquery "SELECT 1; SELECT 2"`, a bit easier to script (e.g. `queries.push_back('--query "$q"')`) and more consistent with the behavior of the existing parameter `--queries-file` (e.g. `./clickhouse client --queries-file queries1.sql --queries-file queries2.sql`). [#54249](https://github.com/ClickHouse/ClickHouse/pull/54249) ([Robert Schulze](https://github.com/rschu1ze)).
* Add sub-second precision to `formatReadableTimeDelta`. [#54250](https://github.com/ClickHouse/ClickHouse/pull/54250) ([Andrey Zvonov](https://github.com/zvonand)).
* Enable `allow_remove_stale_moving_parts` by default. [#54260](https://github.com/ClickHouse/ClickHouse/pull/54260) ([vdimir](https://github.com/vdimir)).
* Fix using count from cache and improve progress bar for reading from archives. [#54271](https://github.com/ClickHouse/ClickHouse/pull/54271) ([Kruglov Pavel](https://github.com/Avogar)).
* Add support for S3 credentials using SSO. To define a profile to be used with SSO, set `AWS_PROFILE` environment variable. [#54347](https://github.com/ClickHouse/ClickHouse/pull/54347) ([Antonio Andelic](https://github.com/antonio2368)).
* Support NULL as default for nested types Array/Tuple/Map for input formats. Closes [#51100](https://github.com/ClickHouse/ClickHouse/issues/51100). [#54351](https://github.com/ClickHouse/ClickHouse/pull/54351) ([Kruglov Pavel](https://github.com/Avogar)).
* Allow reading some unusual configuration of chunks from Arrow/Parquet formats. [#54370](https://github.com/ClickHouse/ClickHouse/pull/54370) ([Arthur Passos](https://github.com/arthurpassos)).
* Add `STD` alias to `stddevPop` function for MySQL compatibility. Closes [#54274](https://github.com/ClickHouse/ClickHouse/issues/54274). [#54382](https://github.com/ClickHouse/ClickHouse/pull/54382) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add `addDate` function for compatibility with MySQL and `subDate` for consistency. Reference [#54275](https://github.com/ClickHouse/ClickHouse/issues/54275). [#54400](https://github.com/ClickHouse/ClickHouse/pull/54400) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add `modification_time` into `system.detached_parts`. [#54506](https://github.com/ClickHouse/ClickHouse/pull/54506) ([Azat Khuzhin](https://github.com/azat)).
* Added a setting `splitby_max_substrings_includes_remaining_string` which controls if functions "splitBy*()" with argument "max_substring" > 0 include the remaining string (if any) in the result array (Python/Spark semantics) or not. The default behavior does not change. [#54518](https://github.com/ClickHouse/ClickHouse/pull/54518) ([Robert Schulze](https://github.com/rschu1ze)).
* Better integer types inference for `Int64`/`UInt64` fields. Continuation of [#53003](https://github.com/ClickHouse/ClickHouse/pull/53003). Now it works also for nested types like Arrays of Arrays and for functions like `map/tuple`. Issue: [#51236](https://github.com/ClickHouse/ClickHouse/issues/51236). [#54553](https://github.com/ClickHouse/ClickHouse/pull/54553) ([Kruglov Pavel](https://github.com/Avogar)).
* Added array operations for multiplying, dividing and modulo with a scalar. They work both ways, for example `5 * [5, 5]` and `[5, 5] * 5` are both possible (see the example at the end of this section). [#54608](https://github.com/ClickHouse/ClickHouse/pull/54608) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
* Add optional `version` argument to `rm` command in `keeper-client` to support safer deletes. [#54708](https://github.com/ClickHouse/ClickHouse/pull/54708) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Disable killing the server by systemd (that may lead to data loss when using Buffer tables). [#54744](https://github.com/ClickHouse/ClickHouse/pull/54744) ([Azat Khuzhin](https://github.com/azat)).
* Added field `is_deterministic` to system table `system.functions` which indicates whether the result of a function is stable between two invocations (given exactly the same inputs) or not. [#54766](https://github.com/ClickHouse/ClickHouse/pull/54766) [#55035](https://github.com/ClickHouse/ClickHouse/pull/55035) ([Robert Schulze](https://github.com/rschu1ze)).
* Made the views in schema `information_schema` more compatible with the equivalent views in MySQL (i.e. modified and extended them) up to a point where Tableau Online is able to connect to ClickHouse. More specifically: 1. The type of field `information_schema.tables.table_type` changed from Enum8 to String. 2. Added fields `table_comment` and `table_collation` to view `information_schema.table`. 3. Added views `information_schema.key_column_usage` and `referential_constraints`. 4. Replaced uppercase aliases in `information_schema` views with concrete uppercase columns. [#54773](https://github.com/ClickHouse/ClickHouse/pull/54773) ([Serge Klochkov](https://github.com/slvrtrn)).
* The query cache now returns an error if the user tries to cache the result of a query with a non-deterministic function such as `now`, `randomString` and `dictGet`. Compared to the previous behavior (silently don't cache the result), this reduces confusion and surprise for users. [#54801](https://github.com/ClickHouse/ClickHouse/pull/54801) ([Robert Schulze](https://github.com/rschu1ze)).
* Forbid special columns like materialized/ephemeral/alias for `file`/`s3`/`url`/... storages, fix insert into ephemeral columns from files. Closes [#53477](https://github.com/ClickHouse/ClickHouse/issues/53477). [#54803](https://github.com/ClickHouse/ClickHouse/pull/54803) ([Kruglov Pavel](https://github.com/Avogar)).
* Made collecting metadata for backups more configurable. [#54804](https://github.com/ClickHouse/ClickHouse/pull/54804) ([Vitaly Baranov](https://github.com/vitlibar)).
* `clickhouse-local`'s log file (if enabled with --server_logs_file flag) will now prefix each line with timestamp, thread id, etc, just like `clickhouse-server`. [#54807](https://github.com/ClickHouse/ClickHouse/pull/54807) ([Michael Kolupaev](https://github.com/al13n321)).
* The field `is_obsolete` in the `system.merge_tree_settings` table is now 1 for obsolete merge tree settings. Previously, only the description indicated that a setting is obsolete. [#54837](https://github.com/ClickHouse/ClickHouse/pull/54837) ([Robert Schulze](https://github.com/rschu1ze)).
* Make it possible to use plural when using interval literals. `INTERVAL 2 HOURS` should be equivalent to `INTERVAL 2 HOUR`. [#54860](https://github.com/ClickHouse/ClickHouse/pull/54860) ([Jordi Villar](https://github.com/jrdi)).
* Always allow the creation of a projection with `Nullable` PK. This fixes [#54814](https://github.com/ClickHouse/ClickHouse/issues/54814). [#54895](https://github.com/ClickHouse/ClickHouse/pull/54895) ([Amos Bird](https://github.com/amosbird)).
* Retry backup's S3 operations after connection reset failure. [#54900](https://github.com/ClickHouse/ClickHouse/pull/54900) ([Vitaly Baranov](https://github.com/vitlibar)).
* Make the exception message exact in case the maximum value of a setting is less than the minimum value. [#54925](https://github.com/ClickHouse/ClickHouse/pull/54925) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* `LIKE`, `match`, and other regular expressions matching functions now allow matching with patterns containing non-UTF-8 substrings by falling back to binary matching. Example: you can use `string LIKE '\xFE\xFF%'` to detect BOM. This closes [#54486](https://github.com/ClickHouse/ClickHouse/issues/54486). [#54942](https://github.com/ClickHouse/ClickHouse/pull/54942) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Added `ContextLockWaitMicroseconds` profile event. [#55029](https://github.com/ClickHouse/ClickHouse/pull/55029) ([Maksim Kita](https://github.com/kitaisreal)).
* The Keeper dynamically adjusts log levels. [#50372](https://github.com/ClickHouse/ClickHouse/pull/50372) ([helifu](https://github.com/helifu)).
* Added function `timestamp` for compatibility with MySQL. Closes [#54275](https://github.com/ClickHouse/ClickHouse/issues/54275). [#54639](https://github.com/ClickHouse/ClickHouse/pull/54639) ([Nikolay Degterinsky](https://github.com/evillique)).
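To illustrate the array/scalar arithmetic entry above, a small self-contained example (no assumptions beyond the operators described there):

```sql
SELECT
    5 * [5, 5]   AS multiplied,  -- [25, 25]
    [10, 20] / 2 AS divided,     -- [5, 10]
    [7, 8] % 3   AS remainder;   -- [1, 2]
```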
#### Build/Testing/Packaging Improvement
* Bumped the compiler of official and continuous integration builds of ClickHouse from Clang 16 to 17. [#53831](https://github.com/ClickHouse/ClickHouse/pull/53831) ([Robert Schulze](https://github.com/rschu1ze)).
* Regenerated tld data for lookups (`tldLookup.generated.cpp`). [#54269](https://github.com/ClickHouse/ClickHouse/pull/54269) ([Bharat Nallan](https://github.com/bharatnc)).
* Remove the redundant `clickhouse-keeper-client` symlink. [#54587](https://github.com/ClickHouse/ClickHouse/pull/54587) ([Tomas Barton](https://github.com/deric)).
* Use `/usr/bin/env` to resolve bash - now it supports NixOS. [#54603](https://github.com/ClickHouse/ClickHouse/pull/54603) ([Fionera](https://github.com/fionera)).
* CMake added `PROFILE_CPU` option needed to perform `perf record` without using a DWARF call graph. [#54917](https://github.com/ClickHouse/ClickHouse/pull/54917) ([Maksim Kita](https://github.com/kitaisreal)).
* If the linker is different than LLD, stop with a fatal error. [#55036](https://github.com/ClickHouse/ClickHouse/pull/55036) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Replaced the library to handle (encode/decode) base64 values from Turbo-Base64 to aklomp-base64. Both are SIMD-accelerated on x86 and ARM but 1. the license of the latter (BSD-2) is more favorable for ClickHouse, Turbo64 switched in the meantime to GPL-3, 2. with more GitHub stars, aklomp-base64 seems more future-proof, 3. aklomp-base64 has a slightly nicer API (which is arguably subjective), and 4. aklomp-base64 does not require us to hack around bugs (like non-threadsafe initialization). Note: aklomp-base64 rejects unpadded base64 values whereas Turbo-Base64 decodes them on a best-effort basis. RFC-4648 leaves it open whether padding is mandatory or not, but depending on the context this may be a behavioral change to be aware of. [#54119](https://github.com/ClickHouse/ClickHouse/pull/54119) ([Mikhail Koviazin](https://github.com/mkmkme)).
#### Bug Fix (user-visible misbehavior in an official stable release)
* Fix REPLACE/MOVE PARTITION with zero-copy replication (note: "zero-copy replication" is an experimental feature) [#54193](https://github.com/ClickHouse/ClickHouse/pull/54193) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix zero copy locks with hardlinks (note: "zero-copy replication" is an experimental feature) [#54859](https://github.com/ClickHouse/ClickHouse/pull/54859) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix zero copy garbage (note: "zero-copy replication" is an experimental feature) [#54550](https://github.com/ClickHouse/ClickHouse/pull/54550) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Pass HTTP retry timeout as milliseconds (it was incorrect before). [#54438](https://github.com/ClickHouse/ClickHouse/pull/54438) ([Duc Canh Le](https://github.com/canhld94)).
* Fix misleading error message in OUTFILE with `CapnProto`/`Protobuf` [#52870](https://github.com/ClickHouse/ClickHouse/pull/52870) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix summary reporting with parallel replicas with LIMIT [#53050](https://github.com/ClickHouse/ClickHouse/pull/53050) ([Raúl Marín](https://github.com/Algunenano)).
* Fix throttling of BACKUPs from/to S3 (in case native copy was not used) and in some other places as well [#53336](https://github.com/ClickHouse/ClickHouse/pull/53336) ([Azat Khuzhin](https://github.com/azat)).
* Fix IO throttling during copying whole directories [#53338](https://github.com/ClickHouse/ClickHouse/pull/53338) ([Azat Khuzhin](https://github.com/azat)).
* Fix: condition actions moved to PREWHERE could lose a column [#53492](https://github.com/ClickHouse/ClickHouse/pull/53492) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fixed internal error when replacing with byte-equal parts [#53735](https://github.com/ClickHouse/ClickHouse/pull/53735) ([Pedro Riera](https://github.com/priera)).
* Fix: require columns participating in interpolate expression [#53754](https://github.com/ClickHouse/ClickHouse/pull/53754) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix cluster discovery initialization + setting up fail points in config [#54113](https://github.com/ClickHouse/ClickHouse/pull/54113) ([vdimir](https://github.com/vdimir)).
* Fix issues in `accurateCastOrNull` [#54136](https://github.com/ClickHouse/ClickHouse/pull/54136) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Fix nullable primary key with the FINAL modifier [#54164](https://github.com/ClickHouse/ClickHouse/pull/54164) ([Amos Bird](https://github.com/amosbird)).
* Fixed error that prevented insertion in replicated materialized view of new data in presence of duplicated data. [#54184](https://github.com/ClickHouse/ClickHouse/pull/54184) ([Pedro Riera](https://github.com/priera)).
* Fix: allow `IPv6` for bloom filter [#54200](https://github.com/ClickHouse/ClickHouse/pull/54200) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix possible type mismatch with `IPv4` [#54212](https://github.com/ClickHouse/ClickHouse/pull/54212) ([Bharat Nallan](https://github.com/bharatnc)).
* Fix `system.data_skipping_indices` for recreated indices [#54225](https://github.com/ClickHouse/ClickHouse/pull/54225) ([Artur Malchanau](https://github.com/Hexta)).
* Fix name clash for multiple join rewriter v2 [#54240](https://github.com/ClickHouse/ClickHouse/pull/54240) ([Tao Wang](https://github.com/wangtZJU)).
* Fix unexpected errors in `system.errors` after join [#54306](https://github.com/ClickHouse/ClickHouse/pull/54306) ([vdimir](https://github.com/vdimir)).
* Fix `isZeroOrNull(NULL)` [#54316](https://github.com/ClickHouse/ClickHouse/pull/54316) ([flynn](https://github.com/ucasfl)).
* Fix: parallel replicas over distributed with `prefer_localhost_replica` = 1 [#54334](https://github.com/ClickHouse/ClickHouse/pull/54334) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix logical error in vertical merge + replacing merge tree + optimize cleanup [#54368](https://github.com/ClickHouse/ClickHouse/pull/54368) ([alesapin](https://github.com/alesapin)).
* Fix possible error `URI contains invalid characters` in the `s3` table function [#54373](https://github.com/ClickHouse/ClickHouse/pull/54373) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix segfault in AST optimization of `arrayExists` function [#54379](https://github.com/ClickHouse/ClickHouse/pull/54379) ([Nikolay Degterinsky](https://github.com/evillique)).
* Check for overflow before addition in `analysisOfVariance` function [#54385](https://github.com/ClickHouse/ClickHouse/pull/54385) ([Antonio Andelic](https://github.com/antonio2368)).
* Reproduce and fix the bug in removeSharedRecursive [#54430](https://github.com/ClickHouse/ClickHouse/pull/54430) ([Sema Checherinda](https://github.com/CheSema)).
* Fix possible incorrect result with SimpleAggregateFunction in PREWHERE and FINAL [#54436](https://github.com/ClickHouse/ClickHouse/pull/54436) ([Azat Khuzhin](https://github.com/azat)).
* Fix filtering parts with indexHint when the analyzer is disabled [#54449](https://github.com/ClickHouse/ClickHouse/pull/54449) ([Azat Khuzhin](https://github.com/azat)).
* Fix aggregate projections with normalized states [#54480](https://github.com/ClickHouse/ClickHouse/pull/54480) ([Amos Bird](https://github.com/amosbird)).
* `clickhouse-local`: something for multiquery parameter [#54498](https://github.com/ClickHouse/ClickHouse/pull/54498) ([CuiShuoGuo](https://github.com/bakam412)).
* `clickhouse-local` supports `--database` command line argument [#54503](https://github.com/ClickHouse/ClickHouse/pull/54503) ([vdimir](https://github.com/vdimir)).
* Fix possible parsing error in `-WithNames` formats with disabled `input_format_with_names_use_header` [#54513](https://github.com/ClickHouse/ClickHouse/pull/54513) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix rare case of CHECKSUM_DOESNT_MATCH error [#54549](https://github.com/ClickHouse/ClickHouse/pull/54549) ([alesapin](https://github.com/alesapin)).
* Fix sorting of UNION ALL of already sorted results [#54564](https://github.com/ClickHouse/ClickHouse/pull/54564) ([Vitaly Baranov](https://github.com/vitlibar)).
* Fix snapshot install in Keeper [#54572](https://github.com/ClickHouse/ClickHouse/pull/54572) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix race in `ColumnUnique` [#54575](https://github.com/ClickHouse/ClickHouse/pull/54575) ([Nikita Taranov](https://github.com/nickitat)).
* Annoy/Usearch index: Fix LOGICAL_ERROR during build-up with default values [#54600](https://github.com/ClickHouse/ClickHouse/pull/54600) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix serialization of `ColumnDecimal` [#54601](https://github.com/ClickHouse/ClickHouse/pull/54601) ([Nikita Taranov](https://github.com/nickitat)).
* Fix schema inference for *Cluster functions for column names with spaces [#54635](https://github.com/ClickHouse/ClickHouse/pull/54635) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix using structure from insertion tables in case of defaults and explicit insert columns [#54655](https://github.com/ClickHouse/ClickHouse/pull/54655) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix: avoid using regex match, possibly containing alternation, as a key condition. [#54696](https://github.com/ClickHouse/ClickHouse/pull/54696) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix ReplacingMergeTree with vertical merge and cleanup [#54706](https://github.com/ClickHouse/ClickHouse/pull/54706) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Fix virtual columns having incorrect values after ORDER BY [#54811](https://github.com/ClickHouse/ClickHouse/pull/54811) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix filtering parts with indexHint when the analyzer is disabled [#54825](https://github.com/ClickHouse/ClickHouse/pull/54825) [#54449](https://github.com/ClickHouse/ClickHouse/pull/54449) ([Azat Khuzhin](https://github.com/azat)).
* Fix Keeper segfault during shutdown [#54841](https://github.com/ClickHouse/ClickHouse/pull/54841) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix `Invalid number of rows in Chunk` in MaterializedPostgreSQL [#54844](https://github.com/ClickHouse/ClickHouse/pull/54844) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Move obsolete format settings to separate section [#54855](https://github.com/ClickHouse/ClickHouse/pull/54855) ([Kruglov Pavel](https://github.com/Avogar)).
* Rebuild `minmax_count_projection` when partition key gets modified [#54943](https://github.com/ClickHouse/ClickHouse/pull/54943) ([Amos Bird](https://github.com/amosbird)).
* Fix bad cast to `ColumnVector<Int128>` in function `if` [#55019](https://github.com/ClickHouse/ClickHouse/pull/55019) ([Kruglov Pavel](https://github.com/Avogar)).
* Prevent attaching parts from tables with different projections or indices [#55062](https://github.com/ClickHouse/ClickHouse/pull/55062) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Store NULL in scalar result map for empty subquery result [#52240](https://github.com/ClickHouse/ClickHouse/pull/52240) ([vdimir](https://github.com/vdimir)).
* Fix `FINAL` produces invalid read ranges in a rare case [#54934](https://github.com/ClickHouse/ClickHouse/pull/54934) ([Nikita Taranov](https://github.com/nickitat)).
* Fix: insert quorum w/o keeper retries [#55026](https://github.com/ClickHouse/ClickHouse/pull/55026) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix simple state with nullable [#55030](https://github.com/ClickHouse/ClickHouse/pull/55030) ([Pedro Riera](https://github.com/priera)).
### <a id="238"></a> ClickHouse release 23.8 LTS, 2023-08-31
#### Backward Incompatible Change
@ -273,7 +620,7 @@
* Add new setting `disable_url_encoding` that allows disabling decoding/encoding of the path in the URI in the URL engine. [#52337](https://github.com/ClickHouse/ClickHouse/pull/52337) ([Kruglov Pavel](https://github.com/Avogar)).
#### Performance Improvement
* Enable automatic selection of the sparse serialization format by default. It improves performance. The format is supported since version 22.1. After this change, downgrading to versions older than 22.1 might not be possible. You can turn off the usage of the sparse serialization format by providing the `ratio_of_defaults_for_sparse_serialization = 1` setting for your MergeTree tables. [#49631](https://github.com/ClickHouse/ClickHouse/pull/49631) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Enable automatic selection of the sparse serialization format by default. It improves performance. The format is supported since version 22.1. After this change, downgrading to versions older than 22.1 might not be possible. A downgrade may require setting `ratio_of_defaults_for_sparse_serialization=0.9375` [#55153](https://github.com/ClickHouse/ClickHouse/issues/55153). You can turn off the usage of the sparse serialization format by providing the `ratio_of_defaults_for_sparse_serialization = 1` setting for your MergeTree tables. [#49631](https://github.com/ClickHouse/ClickHouse/pull/49631) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Enable `move_all_conditions_to_prewhere` and `enable_multiple_prewhere_read_steps` settings by default. [#46365](https://github.com/ClickHouse/ClickHouse/pull/46365) ([Alexander Gololobov](https://github.com/davenger)).
* Improves performance of some queries by tuning allocator. [#46416](https://github.com/ClickHouse/ClickHouse/pull/46416) ([Azat Khuzhin](https://github.com/azat)).
* Now we use fixed-size tasks in `MergeTreePrefetchedReadPool` as in `MergeTreeReadPool`. Also from now we use connection pool for S3 requests. [#49732](https://github.com/ClickHouse/ClickHouse/pull/49732) ([Nikita Taranov](https://github.com/nickitat)).

View File

@ -88,7 +88,6 @@ if (ENABLE_FUZZING)
set (ENABLE_CLICKHOUSE_ODBC_BRIDGE OFF)
set (ENABLE_LIBRARIES 0)
set (ENABLE_SSL 1)
set (ENABLE_EMBEDDED_COMPILER 0)
set (ENABLE_EXAMPLES 0)
set (ENABLE_UTILS 0)
set (ENABLE_THINLTO 0)
@ -165,7 +164,7 @@ if (OS_LINUX)
# and whatever is poisoning it by LD_PRELOAD should not link to our symbols.
# - The clickhouse-odbc-bridge and clickhouse-library-bridge binaries
# should not expose their symbols to ODBC drivers and libraries.
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-export-dynamic")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-export-dynamic -Wl,--gc-sections")
endif ()
if (OS_DARWIN)
@ -188,9 +187,10 @@ if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
endif ()
endif()
if (CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE"
if (NOT (SANITIZE_COVERAGE OR WITH_COVERAGE)
AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE"
OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO"
OR CMAKE_BUILD_TYPE_UC STREQUAL "MINSIZEREL")
OR CMAKE_BUILD_TYPE_UC STREQUAL "MINSIZEREL"))
set (OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT ON)
else()
set (OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT OFF)
@ -274,6 +274,11 @@ option (ENABLE_BUILD_PROFILING "Enable profiling of build time" OFF)
if (ENABLE_BUILD_PROFILING)
if (COMPILER_CLANG)
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ftime-trace")
if (LINKER_NAME MATCHES "lld")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--time-trace")
set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -Wl,--time-trace")
endif ()
else ()
message (${RECONFIGURE_MESSAGE_LEVEL} "Build profiling is only available with CLang")
endif ()
@ -287,9 +292,6 @@ set (CMAKE_C_STANDARD 11)
set (CMAKE_C_EXTENSIONS ON) # required by most contribs written in C
set (CMAKE_C_STANDARD_REQUIRED ON)
# Compiler-specific coverage flags e.g. -fcoverage-mapping
option(WITH_COVERAGE "Profile the resulting binary/binaries" OFF)
if (COMPILER_CLANG)
# Enable C++14 sized global deallocation functions. It should be enabled by setting -std=c++14 but I'm not sure.
# See https://reviews.llvm.org/D112921
@ -305,20 +307,23 @@ if (COMPILER_CLANG)
set(BRANCHES_WITHIN_32B_BOUNDARIES "-mbranches-within-32B-boundaries")
set(COMPILER_FLAGS "${COMPILER_FLAGS} ${BRANCHES_WITHIN_32B_BOUNDARIES}")
endif()
if (WITH_COVERAGE)
set(COMPILER_FLAGS "${COMPILER_FLAGS} -fprofile-instr-generate -fcoverage-mapping")
# If we want to disable coverage for specific translation units
set(WITHOUT_COVERAGE "-fno-profile-instr-generate -fno-coverage-mapping")
endif()
endif ()
set (COMPILER_FLAGS "${COMPILER_FLAGS}")
# Our built-in unwinder only supports DWARF version up to 4.
set (DEBUG_INFO_FLAGS "-g -gdwarf-4")
set (DEBUG_INFO_FLAGS "-g")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS}")
# Disable omit frame pointer compiler optimization using -fno-omit-frame-pointer
option(DISABLE_OMIT_FRAME_POINTER "Disable omit frame pointer compiler optimization" OFF)
if (DISABLE_OMIT_FRAME_POINTER)
set (CMAKE_CXX_FLAGS_ADD "${CMAKE_CXX_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer")
set (CMAKE_C_FLAGS_ADD "${CMAKE_C_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer")
set (CMAKE_ASM_FLAGS_ADD "${CMAKE_ASM_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer")
endif()
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
@ -546,10 +551,16 @@ if (ENABLE_RUST)
endif()
endif()
if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND NOT SANITIZE AND OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64))
set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON)
else ()
set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF)
endif ()
option(CHECK_LARGE_OBJECT_SIZES "Check that there are no large object files after build." ${CHECK_LARGE_OBJECT_SIZES_DEFAULT})
add_subdirectory (base)
add_subdirectory (src)
add_subdirectory (programs)
add_subdirectory (tests)
add_subdirectory (utils)
if (FUZZER)

View File

@ -1,6 +1,17 @@
[<img alt="ClickHouse — open source distributed column-oriented DBMS" width="400px" src="https://clickhouse.com/images/ch_gh_logo_rounded.png" />](https://clickhouse.com?utm_source=github)
<div align=center>
ClickHouse® is an open-source column-oriented database management system that allows generating analytical data reports in real-time.
[![Website](https://img.shields.io/website?up_message=AVAILABLE&down_message=DOWN&url=https%3A%2F%2Fclickhouse.com&style=for-the-badge)](https://clickhouse.com)
[![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202.0-blueviolet?style=for-the-badge)](https://www.apache.org/licenses/LICENSE-2.0)
<picture align=center>
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/ClickHouse/clickhouse-docs/assets/9611008/4ef9c104-2d3f-4646-b186-507358d2fe28">
<source media="(prefers-color-scheme: light)" srcset="https://github.com/ClickHouse/clickhouse-docs/assets/9611008/b001dc7b-5a45-4dcd-9275-e03beb7f9177">
<img alt="The ClickHouse company logo." src="https://github.com/ClickHouse/clickhouse-docs/assets/9611008/b001dc7b-5a45-4dcd-9275-e03beb7f9177">
</picture>
<h4>ClickHouse® is an open-source column-oriented database management system that allows generating analytical data reports in real-time.</h4>
</div>
## How To Install (Linux, macOS, FreeBSD)
```
@ -22,17 +33,17 @@ curl https://clickhouse.com/ | sh
## Upcoming Events
* [**v23.9 Community Call**](https://clickhouse.com/company/events/v23-9-community-release-call?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-08) - Sep 28 - 23.9 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
* [**ClickHouse Meetup in Amsterdam**](https://www.meetup.com/clickhouse-netherlands-user-group/events/296334590/) - Oct 31
* [**ClickHouse Meetup in Beijing**](https://www.meetup.com/clickhouse-beijing-user-group/events/296334856/) - Nov 4
* [**ClickHouse Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/296334923/) - Nov 14
* [**ClickHouse Meetup in Singapore**](https://www.meetup.com/clickhouse-singapore-meetup-group/events/296334976/) - Nov 15
* [**ClickHouse Meetup in Berlin**](https://www.meetup.com/clickhouse-berlin-user-group/events/296488501/) - Nov 30
* [**ClickHouse Meetup in NYC**](https://www.meetup.com/clickhouse-new-york-user-group/events/296488779/) - Dec 11
* [**ClickHouse Meetup in Boston**](https://www.meetup.com/clickhouse-boston-user-group/events/296488840/) - Dec 12
Also, keep an eye out for upcoming meetups around the world. Somewhere else you want us to be? Please feel free to reach out to tyler <at> clickhouse <dot> com.
## Recent Recordings
* **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible, recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Currently featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments"
* **Recording available**: [**v23.6 Release Webinar**](https://www.youtube.com/watch?v=cuf_hYn7dqU) All the features of 23.6, one convenient video! Watch it now!
* **Recording available**: [**v23.10 Release Webinar**](https://www.youtube.com/watch?v=PGQS6uPb970) All the features of 23.10, one convenient video! Watch it now!
* **All release webinar recordings**: [YouTube playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3jAlSy1JxyP8zluvXaN3nxU)
@ -42,4 +53,4 @@ We are a globally diverse and distributed team, united behind a common goal of c
Check out our **current openings** here: https://clickhouse.com/company/careers
Cant find what you are looking for, but want to let us know you are interested in joining ClickHouse? Email careers@clickhouse.com!
Can't find what you are looking for, but want to let us know you are interested in joining ClickHouse? Email careers@clickhouse.com!

View File

@ -13,9 +13,11 @@ The following versions of ClickHouse server are currently being supported with s
| Version | Supported |
|:-|:-|
| 23.10 | ✔️ |
| 23.9 | ✔️ |
| 23.8 | ✔️ |
| 23.7 | ✔️ |
| 23.6 | ✔️ |
| 23.7 | |
| 23.6 | |
| 23.5 | ❌ |
| 23.4 | ❌ |
| 23.3 | ✔️ |

View File

@ -1,3 +1,5 @@
add_compile_options($<$<OR:$<COMPILE_LANGUAGE:C>,$<COMPILE_LANGUAGE:CXX>>:${COVERAGE_FLAGS}>)
if (USE_CLANG_TIDY)
set (CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_PATH}")
endif ()
@ -45,6 +47,10 @@ else ()
target_compile_definitions(common PUBLIC WITH_COVERAGE=0)
endif ()
if (TARGET ch_contrib::crc32_s390x)
target_link_libraries(common PUBLIC ch_contrib::crc32_s390x)
endif()
target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")
target_link_libraries (common

View File

@ -35,6 +35,10 @@
#pragma clang diagnostic ignored "-Wreserved-identifier"
#endif
#if defined(__s390x__)
#include <base/crc32c_s390x.h>
#define CRC_INT s390x_crc32c
#endif
/**
* The std::string_view-like container to avoid creating strings to find substrings in the hash table.
@ -264,8 +268,8 @@ inline size_t hashLessThan8(const char * data, size_t size)
if (size >= 4)
{
UInt64 a = unalignedLoad<uint32_t>(data);
return hashLen16(size + (a << 3), unalignedLoad<uint32_t>(data + size - 4));
UInt64 a = unalignedLoadLittleEndian<uint32_t>(data);
return hashLen16(size + (a << 3), unalignedLoadLittleEndian<uint32_t>(data + size - 4));
}
if (size > 0)
@ -285,8 +289,8 @@ inline size_t hashLessThan16(const char * data, size_t size)
{
if (size > 8)
{
UInt64 a = unalignedLoad<UInt64>(data);
UInt64 b = unalignedLoad<UInt64>(data + size - 8);
UInt64 a = unalignedLoadLittleEndian<UInt64>(data);
UInt64 b = unalignedLoadLittleEndian<UInt64>(data + size - 8);
return hashLen16(a, rotateByAtLeast1(b + size, static_cast<UInt8>(size))) ^ b;
}
@ -315,13 +319,13 @@ struct CRC32Hash
do
{
UInt64 word = unalignedLoad<UInt64>(pos);
UInt64 word = unalignedLoadLittleEndian<UInt64>(pos);
res = static_cast<unsigned>(CRC_INT(res, word));
pos += 8;
} while (pos + 8 < end);
UInt64 word = unalignedLoad<UInt64>(end - 8); /// I'm not sure if this is normal.
UInt64 word = unalignedLoadLittleEndian<UInt64>(end - 8); /// I'm not sure if this is normal.
res = static_cast<unsigned>(CRC_INT(res, word));
return res;
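The switch to `unalignedLoadLittleEndian` above keeps the hash byte-order independent. A minimal sketch of the idea (an illustration, not code from this commit):

```cpp
// Load 8 bytes at an arbitrary address and interpret them as a little-endian integer,
// so the same input bytes hash identically on little-endian (x86/ARM) and big-endian (s390x) hosts.
#include <cstdint>
#include <cstring>

inline uint64_t loadLittleEndian64(const char * p)
{
    uint64_t x;
    std::memcpy(&x, p, sizeof(x));   // safe unaligned load; byte order is still the host's
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
    x = __builtin_bswap64(x);        // normalize to little-endian on big-endian hosts
#endif
    return x;
}
```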

View File

@ -1,11 +1,15 @@
#include "coverage.h"
#if WITH_COVERAGE
#pragma GCC diagnostic ignored "-Wreserved-identifier"
# include <mutex>
# include <unistd.h>
/// WITH_COVERAGE enables the default implementation of code coverage,
/// that dumps a map to the filesystem.
#if WITH_COVERAGE
#include <mutex>
#include <unistd.h>
# if defined(__clang__)
@ -31,3 +35,131 @@ void dumpCoverageReportIfPossible()
#endif
}
/// SANITIZE_COVERAGE enables code instrumentation,
/// but leaves the callbacks implementation to us,
/// which we use to calculate coverage on a per-test basis
/// and to write it to system tables.
#if defined(SANITIZE_COVERAGE)
namespace
{
bool pc_guards_initialized = false;
bool pc_table_initialized = false;
uint32_t * guards_start = nullptr;
uint32_t * guards_end = nullptr;
uintptr_t * coverage_array = nullptr;
size_t coverage_array_size = 0;
uintptr_t * all_addresses_array = nullptr;
size_t all_addresses_array_size = 0;
}
extern "C"
{
/// This is called at least once for every DSO for initialization.
/// But we will use it only for the main DSO.
void __sanitizer_cov_trace_pc_guard_init(uint32_t * start, uint32_t * stop)
{
if (pc_guards_initialized)
return;
pc_guards_initialized = true;
/// The function can be called multiple times, but we need to initialize only once.
if (start == stop || *start)
return;
guards_start = start;
guards_end = stop;
coverage_array_size = stop - start;
/// Note: we will leak this.
coverage_array = static_cast<uintptr_t*>(malloc(sizeof(uintptr_t) * coverage_array_size));
resetCoverage();
}
/// This is called at least once for every DSO for initialization
/// and provides information about all instrumented addresses.
void __sanitizer_cov_pcs_init(const uintptr_t * pcs_begin, const uintptr_t * pcs_end)
{
if (pc_table_initialized)
return;
pc_table_initialized = true;
all_addresses_array = static_cast<uintptr_t*>(malloc(sizeof(uintptr_t) * coverage_array_size));
all_addresses_array_size = pcs_end - pcs_begin;
/// These are not real pointers; they also contain a flag in the most significant bit,
/// in which we are not interested for now. Reset it.
for (size_t i = 0; i < all_addresses_array_size; ++i)
all_addresses_array[i] = pcs_begin[i] & 0x7FFFFFFFFFFFFFFFULL;
}
/// This is called at every basic block / edge, etc.
void __sanitizer_cov_trace_pc_guard(uint32_t * guard)
{
/// Duplicate the guard check.
if (!*guard)
return;
*guard = 0;
/// If you set *guard to 0 this code will not be called again for this edge.
/// Now we can get the PC and do whatever you want:
/// - store it somewhere or symbolize it and print right away.
/// The values of `*guard` are as you set them in
/// __sanitizer_cov_trace_pc_guard_init and so you can make them consecutive
/// and use them to dereference an array or a bit vector.
void * pc = __builtin_return_address(0);
coverage_array[guard - guards_start] = reinterpret_cast<uintptr_t>(pc);
}
}
__attribute__((no_sanitize("coverage")))
std::span<const uintptr_t> getCoverage()
{
return {coverage_array, coverage_array_size};
}
__attribute__((no_sanitize("coverage")))
std::span<const uintptr_t> getAllInstrumentedAddresses()
{
return {all_addresses_array, all_addresses_array_size};
}
__attribute__((no_sanitize("coverage")))
void resetCoverage()
{
memset(coverage_array, 0, coverage_array_size * sizeof(*coverage_array));
/// The guard defines whether the __sanitizer_cov_trace_pc_guard should be called.
/// For example, you can unset it after first invocation to prevent excessive work.
/// Initially set all the guards to 1 to enable callbacks.
for (uint32_t * x = guards_start; x < guards_end; ++x)
*x = 1;
}
#else
std::span<const uintptr_t> getCoverage()
{
return {};
}
std::span<const uintptr_t> getAllInstrumentedAddresses()
{
return {};
}
void resetCoverage()
{
}
#endif

View File

@ -1,5 +1,8 @@
#pragma once
#include <span>
#include <cstdint>
/// Flush coverage report to file, depending on coverage system
/// proposed by compiler (llvm for clang and gcov for gcc).
///
@ -7,3 +10,16 @@
/// Thread safe (use exclusive lock).
/// Idempotent, may be called multiple times.
void dumpCoverageReportIfPossible();
/// This is effective if SANITIZE_COVERAGE is enabled at build time.
/// Get accumulated unique program addresses of the instrumented parts of the code,
/// seen so far after program startup or after previous reset.
/// The returned span will be represented as a sparse map, containing mostly zeros, which you should filter away.
std::span<const uintptr_t> getCoverage();
/// Get all instrumented addresses that could be in the coverage.
std::span<const uintptr_t> getAllInstrumentedAddresses();
/// Reset the accumulated coverage.
/// This is useful to compare coverage of different tests, including differential coverage.
void resetCoverage();
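A hypothetical usage sketch of this API (not part of this commit), assuming the binary was built with `SANITIZE_COVERAGE` and that the header above is available as `base/coverage.h`:

```cpp
#include <base/coverage.h>   // assumed include path for the header above
#include <cstdint>
#include <iostream>
#include <string>

void reportCoverageForTest(const std::string & test_name)
{
    size_t hit = 0;
    for (uintptr_t addr : getCoverage())
        if (addr != 0)                     /// the span is sparse: zeros mean "not hit", filter them away
            ++hit;

    std::cout << test_name << ": " << hit << " of "
              << getAllInstrumentedAddresses().size() << " instrumented addresses hit\n";

    resetCoverage();                       /// start from a clean slate before the next test
}
```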

26
base/base/crc32c_s390x.h Normal file
View File

@ -0,0 +1,26 @@
#pragma once
#include <crc32-s390x.h>
inline uint32_t s390x_crc32c_u8(uint32_t crc, uint8_t v)
{
return crc32c_le_vx(crc, reinterpret_cast<unsigned char *>(&v), sizeof(v));
}
inline uint32_t s390x_crc32c_u16(uint32_t crc, uint16_t v)
{
v = __builtin_bswap16(v);
return crc32c_le_vx(crc, reinterpret_cast<unsigned char *>(&v), sizeof(v));
}
inline uint32_t s390x_crc32c_u32(uint32_t crc, uint32_t v)
{
v = __builtin_bswap32(v);
return crc32c_le_vx(crc, reinterpret_cast<unsigned char *>(&v), sizeof(v));
}
inline uint64_t s390x_crc32c(uint64_t crc, uint64_t v)
{
v = __builtin_bswap64(v);
return crc32c_le_vx(static_cast<uint32_t>(crc), reinterpret_cast<unsigned char *>(&v), sizeof(uint64_t));
}

View File

@ -119,17 +119,16 @@
#include <base/types.h>
namespace DB
{
void abortOnFailedAssertion(const String & description);
[[noreturn]] void abortOnFailedAssertion(const String & description);
}
#define chassert(x) static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(#x)
#define chassert(x) do { static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(#x); } while (0)
#define UNREACHABLE() abort()
// clang-format off
#else
/// Here sizeof() trick is used to suppress unused warning for result,
/// since simple "(void)x" will evaluate the expression, while
/// "sizeof(!(x))" will not.
#define NIL_EXPRESSION(x) (void)sizeof(!(x))
#define chassert(x) NIL_EXPRESSION(x)
#define chassert(x) (void)sizeof(!(x))
#define UNREACHABLE() __builtin_unreachable()
#endif
#endif
@ -150,6 +149,7 @@
# define TSA_ACQUIRE_SHARED(...) __attribute__((acquire_shared_capability(__VA_ARGS__))) /// function acquires a shared capability, but does not release it
# define TSA_TRY_ACQUIRE_SHARED(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__))) /// function tries to acquire a shared capability and returns a boolean value indicating success or failure
# define TSA_RELEASE_SHARED(...) __attribute__((release_shared_capability(__VA_ARGS__))) /// function releases the given shared capability
# define TSA_SCOPED_LOCKABLE __attribute__((scoped_lockable)) /// object of a class has scoped lockable capability
/// Macros for suppressing TSA warnings for specific reads/writes (instead of suppressing it for the whole function)
/// They use a lambda function to apply function attribute to a single statement. This enable us to suppress warnings locally instead of
@ -177,6 +177,7 @@
# define TSA_ACQUIRE_SHARED(...)
# define TSA_TRY_ACQUIRE_SHARED(...)
# define TSA_RELEASE_SHARED(...)
# define TSA_SCOPED_LOCKABLE
# define TSA_SUPPRESS_WARNING_FOR_READ(x) (x)
# define TSA_SUPPRESS_WARNING_FOR_WRITE(x) (x)

View File

@ -289,3 +289,13 @@ inline void writeBinByte(UInt8 byte, void * out)
{
memcpy(out, &impl::bin_byte_to_char_table[static_cast<size_t>(byte) * 8], 8);
}
/// Converts byte array to a hex string. Useful for debug logging.
inline std::string hexString(const void * data, size_t size)
{
const char * p = reinterpret_cast<const char *>(data);
std::string s(size * 2, '\0');
for (size_t i = 0; i < size; ++i)
writeHexByteLowercase(p[i], s.data() + i * 2);
return s;
}
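For example (a hypothetical caller, not part of this commit), logging the raw bytes of a key:

```cpp
// Assumes the hex helper header shown above is included.
#include <iostream>
#include <string>

void logKeyBytes(const std::string & key)
{
    /// hexString("AB", 2) yields "4142" ('A' = 0x41, 'B' = 0x42, lowercase hex)
    std::cout << "key bytes: 0x" << hexString(key.data(), key.size()) << '\n';
}
```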

View File

@ -131,3 +131,29 @@ void sort(RandomIt first, RandomIt last)
using comparator = std::less<value_type>;
::sort(first, last, comparator());
}
/** Try to fast sort elements for common sorting patterns:
* 1. If elements are already sorted.
* 2. If elements are already almost sorted.
* 3. If elements are already sorted in reverse order.
*
* Returns true if fast sort was performed or elements were already sorted, false otherwise.
*/
template <typename RandomIt, typename Compare>
bool trySort(RandomIt first, RandomIt last, Compare compare)
{
#ifndef NDEBUG
::shuffle(first, last);
#endif
ComparatorWrapper<Compare> compare_wrapper = compare;
return ::pdqsort_try_sort(first, last, compare_wrapper);
}
template <typename RandomIt>
bool trySort(RandomIt first, RandomIt last)
{
using value_type = typename std::iterator_traits<RandomIt>::value_type;
using comparator = std::less<value_type>;
return ::trySort(first, last, comparator());
}
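A hypothetical usage sketch of `trySort` (not from this commit): take the cheap pattern-based path first, and fall back to a full sort only when it does not apply.

```cpp
// Assumes the sorting header shown above is included.
#include <algorithm>
#include <vector>

void sortMaybeFast(std::vector<int> & data)
{
    /// trySort returns true if the data matched one of the fast patterns and is now fully sorted.
    if (!trySort(data.begin(), data.end()))
        std::sort(data.begin(), data.end());   /// generic fallback for the remaining cases
}
```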

View File

@ -65,7 +65,7 @@ class IsTupleLike
static void check(...);
public:
static constexpr const bool value = !std::is_void<decltype(check<T>(nullptr))>::value;
static constexpr const bool value = !std::is_void_v<decltype(check<T>(nullptr))>;
};
}
@ -79,7 +79,7 @@ class numeric_limits<wide::integer<Bits, Signed>>
{
public:
static constexpr bool is_specialized = true;
static constexpr bool is_signed = is_same<Signed, signed>::value;
static constexpr bool is_signed = is_same_v<Signed, signed>;
static constexpr bool is_integer = true;
static constexpr bool is_exact = true;
static constexpr bool has_infinity = false;
@ -91,7 +91,7 @@ public:
static constexpr bool is_iec559 = false;
static constexpr bool is_bounded = true;
static constexpr bool is_modulo = true;
static constexpr int digits = Bits - (is_same<Signed, signed>::value ? 1 : 0);
static constexpr int digits = Bits - (is_same_v<Signed, signed> ? 1 : 0);
static constexpr int digits10 = digits * 0.30103 /*std::log10(2)*/;
static constexpr int max_digits10 = 0;
static constexpr int radix = 2;
@ -104,7 +104,7 @@ public:
static constexpr wide::integer<Bits, Signed> min() noexcept
{
if (is_same<Signed, signed>::value)
if constexpr (is_same_v<Signed, signed>)
{
using T = wide::integer<Bits, signed>;
T res{};
@ -118,7 +118,7 @@ public:
{
using T = wide::integer<Bits, Signed>;
T res{};
res.items[T::_impl::big(0)] = is_same<Signed, signed>::value
res.items[T::_impl::big(0)] = is_same_v<Signed, signed>
? std::numeric_limits<typename wide::integer<Bits, Signed>::signed_base_type>::max()
: std::numeric_limits<typename wide::integer<Bits, Signed>::base_type>::max();
for (unsigned i = 1; i < wide::integer<Bits, Signed>::_impl::item_count; ++i)

View File

@ -5,9 +5,6 @@ if (GLIBC_COMPATIBILITY)
endif()
enable_language(ASM)
include(CheckIncludeFile)
check_include_file("sys/random.h" HAVE_SYS_RANDOM_H)
add_headers_and_sources(glibc_compatibility .)
add_headers_and_sources(glibc_compatibility musl)
@ -21,11 +18,6 @@ if (GLIBC_COMPATIBILITY)
message (FATAL_ERROR "glibc_compatibility can only be used on x86_64 or aarch64.")
endif ()
list(REMOVE_ITEM glibc_compatibility_sources musl/getentropy.c)
if(HAVE_SYS_RANDOM_H)
list(APPEND glibc_compatibility_sources musl/getentropy.c)
endif()
# Need to omit frame pointers to match the performance of glibc
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer")

View File

@ -1,5 +1,6 @@
#include "memcpy.h"
__attribute__((no_sanitize("coverage")))
extern "C" void * memcpy(void * __restrict dst, const void * __restrict src, size_t size)
{
return inline_memcpy(dst, src, size);

View File

@ -93,7 +93,7 @@
* See https://habr.com/en/company/yandex/blog/457612/
*/
__attribute__((no_sanitize("coverage")))
static inline void * inline_memcpy(void * __restrict dst_, const void * __restrict src_, size_t size)
{
/// We will use pointer arithmetic, so char pointer will be used.

View File

@ -70,7 +70,7 @@ namespace MongoDB
Document::Ptr queryBuildInfo(Connection & connection) const;
/// Queries server build info (all wire protocols)
Document::Ptr queryServerHello(Connection & connection) const;
Document::Ptr queryServerHello(Connection & connection, bool old = false) const;
/// Queries hello response from server (all wire protocols)
Int64 count(Connection & connection, const std::string & collectionName) const;

View File

@ -356,11 +356,19 @@ Document::Ptr Database::queryBuildInfo(Connection& connection) const
}
Document::Ptr Database::queryServerHello(Connection& connection) const
Document::Ptr Database::queryServerHello(Connection& connection, bool old) const
{
// hello can be issued on "config" system database
Poco::SharedPtr<Poco::MongoDB::QueryRequest> request = createCommand();
request->selector().add("hello", 1);
// 'hello' command was previously called 'isMaster'
std::string command_name;
if (old)
command_name = "isMaster";
else
command_name = "hello";
request->selector().add(command_name, 1);
Poco::MongoDB::ResponseMessage response;
connection.sendRequest(*request, response);

View File

@ -68,7 +68,7 @@ namespace Net
struct ProxyConfig
/// HTTP proxy server configuration.
{
ProxyConfig() : port(HTTP_PORT), protocol("http"), tunnel(true) { }
ProxyConfig() : port(HTTP_PORT), protocol("http"), tunnel(true), originalRequestProtocol("http") { }
std::string host;
/// Proxy server host name or IP address.
@ -87,6 +87,9 @@ namespace Net
/// A regular expression defining hosts for which the proxy should be bypassed,
/// e.g. "localhost|127\.0\.0\.1|192\.168\.0\.\d+". Can also be an empty
/// string to disable proxy bypassing.
std::string originalRequestProtocol;
/// Original request protocol (http or https).
/// Required in the case of: HTTPS request over HTTP proxy with tunneling (CONNECT) off.
};
HTTPClientSession();

View File

@ -418,7 +418,7 @@ void HTTPClientSession::reconnect()
std::string HTTPClientSession::proxyRequestPrefix() const
{
std::string result("http://");
std::string result(_proxyConfig.originalRequestProtocol + "://");
result.append(_host);
/// Do not append default by default, since this may break some servers.
/// One example of such server is GCS (Google Cloud Storage).
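A hypothetical sketch of the configuration the new `originalRequestProtocol` field is meant for: an HTTPS request sent through a plain-HTTP proxy with the CONNECT tunnel disabled. The host and port below are made up.

```cpp
#include <Poco/Net/HTTPClientSession.h>

Poco::Net::HTTPClientSession::ProxyConfig makeProxyConfig()
{
    Poco::Net::HTTPClientSession::ProxyConfig proxy;
    proxy.host = "proxy.internal";            // hypothetical proxy host
    proxy.port = 3128;                        // hypothetical proxy port
    proxy.protocol = "http";                  // the proxy itself speaks plain HTTP
    proxy.tunnel = false;                     // tunneling (CONNECT) is off
    proxy.originalRequestProtocol = "https";  // so proxyRequestPrefix() yields "https://<host>..."
    return proxy;
}
```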

View File

@ -1,19 +0,0 @@
# Adding test output on failure
enable_testing ()
if (NOT TARGET check)
if (CMAKE_CONFIGURATION_TYPES)
add_custom_target (check COMMAND ${CMAKE_CTEST_COMMAND}
--force-new-ctest-process --output-on-failure --build-config "$<CONFIGURATION>"
WORKING_DIRECTORY ${PROJECT_BINARY_DIR})
else ()
add_custom_target (check COMMAND ${CMAKE_CTEST_COMMAND}
--force-new-ctest-process --output-on-failure
WORKING_DIRECTORY ${PROJECT_BINARY_DIR})
endif ()
endif ()
macro (add_check target)
add_test (NAME test_${target} COMMAND ${target} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
add_dependencies (check ${target})
endmacro (add_check)

View File

@ -2,11 +2,11 @@
# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
SET(VERSION_REVISION 54478)
SET(VERSION_REVISION 54480)
SET(VERSION_MAJOR 23)
SET(VERSION_MINOR 9)
SET(VERSION_MINOR 11)
SET(VERSION_PATCH 1)
SET(VERSION_GITHASH ebc7d9a9f3b40be89e0b3e738b35d394aabeea3e)
SET(VERSION_DESCRIBE v23.9.1.1-testing)
SET(VERSION_STRING 23.9.1.1)
SET(VERSION_GITHASH 13adae0e42fd48de600486fc5d4b64d39f80c43e)
SET(VERSION_DESCRIBE v23.11.1.1-testing)
SET(VERSION_STRING 23.11.1.1)
# end of autochange

View File

@ -12,7 +12,7 @@ endif()
set(COMPILER_CACHE "auto" CACHE STRING "Speedup re-compilations using the caching tools; valid options are 'auto' (ccache, then sccache), 'ccache', 'sccache', or 'disabled'")
if(COMPILER_CACHE STREQUAL "auto")
find_program (CCACHE_EXECUTABLE ccache sccache)
find_program (CCACHE_EXECUTABLE NAMES ccache sccache)
elseif (COMPILER_CACHE STREQUAL "ccache")
find_program (CCACHE_EXECUTABLE ccache)
elseif(COMPILER_CACHE STREQUAL "sccache")

View File

@ -1,10 +1,5 @@
# https://software.intel.com/sites/landingpage/IntrinsicsGuide/
include (CheckCXXSourceCompiles)
include (CMakePushCheckState)
cmake_push_check_state ()
# The variables HAVE_* determine if compiler has support for the flag to use the corresponding instruction set.
# The options ENABLE_* determine if we will tell compiler to actually use the corresponding instruction set if compiler can do it.
@ -137,178 +132,53 @@ elseif (ARCH_AMD64)
endif()
# ClickHouse can be cross-compiled (e.g. on an ARM host for x86) but it is also possible to build ClickHouse on x86 w/o AVX for x86 w/
# AVX. We only check that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary.
# Therefore, use check_cxx_source_compiles (= does the code compile+link?) instead of check_cxx_source_runs (= does the code
# compile+link+run).
# AVX. We only assume that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary.
SET (HAVE_SSSE3 1)
SET (HAVE_SSE41 1)
SET (HAVE_SSE42 1)
SET (HAVE_PCLMULQDQ 1)
SET (HAVE_POPCNT 1)
SET (HAVE_AVX 1)
SET (HAVE_AVX2 1)
SET (HAVE_AVX512 1)
SET (HAVE_AVX512_VBMI 1)
SET (HAVE_BMI 1)
SET (HAVE_BMI2 1)
set (TEST_FLAG "-mssse3")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <tmmintrin.h>
int main() {
__m64 a = _mm_abs_pi8(__m64());
(void)a;
return 0;
}
" HAVE_SSSE3)
if (HAVE_SSSE3 AND ENABLE_SSSE3)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mssse3")
endif ()
set (TEST_FLAG "-msse4.1")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <smmintrin.h>
int main() {
auto a = _mm_insert_epi8(__m128i(), 0, 0);
(void)a;
return 0;
}
" HAVE_SSE41)
if (HAVE_SSE41 AND ENABLE_SSE41)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -msse4.1")
endif ()
set (TEST_FLAG "-msse4.2")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <nmmintrin.h>
int main() {
auto a = _mm_crc32_u64(0, 0);
(void)a;
return 0;
}
" HAVE_SSE42)
if (HAVE_SSE42 AND ENABLE_SSE42)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -msse4.2")
endif ()
set (TEST_FLAG "-mpclmul")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <wmmintrin.h>
int main() {
auto a = _mm_clmulepi64_si128(__m128i(), __m128i(), 0);
(void)a;
return 0;
}
" HAVE_PCLMULQDQ)
if (HAVE_PCLMULQDQ AND ENABLE_PCLMULQDQ)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mpclmul")
endif ()
set (TEST_FLAG "-mpopcnt")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
int main() {
auto a = __builtin_popcountll(0);
(void)a;
return 0;
}
" HAVE_POPCNT)
if (HAVE_POPCNT AND ENABLE_POPCNT)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mpopcnt")
endif ()
set (TEST_FLAG "-mavx")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <immintrin.h>
int main() {
auto a = _mm256_insert_epi8(__m256i(), 0, 0);
(void)a;
return 0;
}
" HAVE_AVX)
if (HAVE_AVX AND ENABLE_AVX)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx")
endif ()
set (TEST_FLAG "-mavx2")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <immintrin.h>
int main() {
auto a = _mm256_add_epi16(__m256i(), __m256i());
(void)a;
return 0;
}
" HAVE_AVX2)
if (HAVE_AVX2 AND ENABLE_AVX2)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx2")
endif ()
set (TEST_FLAG "-mavx512f -mavx512bw -mavx512vl")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <immintrin.h>
int main() {
auto a = _mm512_setzero_epi32();
(void)a;
auto b = _mm512_add_epi16(__m512i(), __m512i());
(void)b;
auto c = _mm_cmp_epi8_mask(__m128i(), __m128i(), 0);
(void)c;
return 0;
}
" HAVE_AVX512)
if (HAVE_AVX512 AND ENABLE_AVX512)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx512f -mavx512bw -mavx512vl")
endif ()
set (TEST_FLAG "-mavx512vbmi")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <immintrin.h>
int main() {
auto a = _mm512_permutexvar_epi8(__m512i(), __m512i());
(void)a;
return 0;
}
" HAVE_AVX512_VBMI)
if (HAVE_AVX512 AND ENABLE_AVX512 AND HAVE_AVX512_VBMI AND ENABLE_AVX512_VBMI)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx512vbmi")
endif ()
set (TEST_FLAG "-mbmi")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <immintrin.h>
int main() {
auto a = _blsr_u32(0);
(void)a;
return 0;
}
" HAVE_BMI)
if (HAVE_BMI AND ENABLE_BMI)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mbmi")
endif ()
set (TEST_FLAG "-mbmi2")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <immintrin.h>
int main() {
auto a = _pdep_u64(0, 0);
(void)a;
return 0;
}
" HAVE_BMI2)
if (HAVE_BMI2 AND HAVE_AVX2 AND ENABLE_AVX2 AND ENABLE_BMI2)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
set (COMPILER_FLAGS "${COMPILER_FLAGS} -mbmi2")
endif ()
# Limit avx2/avx512 flag for specific source build
set (X86_INTRINSICS_FLAGS "")
if (ENABLE_AVX2_FOR_SPEC_OP)
if (HAVE_BMI)
set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi")
endif ()
if (HAVE_AVX AND HAVE_AVX2)
set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx -mavx2")
endif ()
endif ()
if (ENABLE_AVX512_FOR_SPEC_OP)
set (X86_INTRINSICS_FLAGS "")
if (HAVE_BMI)
@ -321,5 +191,3 @@ elseif (ARCH_AMD64)
else ()
# RISC-V + exotic platforms
endif ()
cmake_pop_check_state ()

View File

@ -54,10 +54,31 @@ if (SANITIZE)
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} ${UBSAN_FLAGS}")
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} ${UBSAN_FLAGS}")
# llvm-tblgen, that is used during LLVM build, doesn't work with UBSan.
set (ENABLE_EMBEDDED_COMPILER 0 CACHE BOOL "")
else ()
message (FATAL_ERROR "Unknown sanitizer type: ${SANITIZE}")
endif ()
endif()
# Default coverage instrumentation (dumping the coverage map on exit)
option(WITH_COVERAGE "Instrumentation for code coverage with default implementation" OFF)
if (WITH_COVERAGE)
message (INFORMATION "Enabled instrumentation for code coverage")
set(COVERAGE_FLAGS "-fprofile-instr-generate -fcoverage-mapping")
endif()
option (SANITIZE_COVERAGE "Instrumentation for code coverage with custom callbacks" OFF)
if (SANITIZE_COVERAGE)
message (INFORMATION "Enabled instrumentation for code coverage")
# We set this define for whole build to indicate that at least some parts are compiled with coverage.
# And to expose it in system.build_options.
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DSANITIZE_COVERAGE=1")
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DSANITIZE_COVERAGE=1")
# But the actual coverage will be enabled on per-library basis: for ClickHouse code, but not for 3rd-party.
set (COVERAGE_FLAGS "-fsanitize-coverage=trace-pc-guard,pc-table")
endif()
set (WITHOUT_COVERAGE_FLAGS "-fno-profile-instr-generate -fno-coverage-mapping -fno-sanitize-coverage=trace-pc-guard,pc-table")

View File

@ -1,3 +1,5 @@
# Generates a separate file with debug symbols while stripping it from the main binary.
# This is needed for Debian packages.
macro(clickhouse_split_debug_symbols)
set(oneValueArgs TARGET DESTINATION_DIR BINARY_PATH)

View File

@ -68,6 +68,7 @@ if (CMAKE_CROSSCOMPILING)
set (ENABLE_ORC OFF CACHE INTERNAL "")
set (ENABLE_GRPC OFF CACHE INTERNAL "")
set (ENABLE_EMBEDDED_COMPILER OFF CACHE INTERNAL "")
set (ENABLE_DWARF_PARSER OFF CACHE INTERNAL "")
else ()
message (FATAL_ERROR "Trying to cross-compile to unsupported system: ${CMAKE_SYSTEM_NAME}!")
endif ()

View File

@ -49,14 +49,14 @@ endif ()
if (NOT LINKER_NAME)
if (COMPILER_CLANG)
if (OS_LINUX)
if (NOT ARCH_S390X) # s390x doesnt support lld
if (OS_LINUX AND NOT ARCH_S390X)
find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "ld.lld")
elseif (OS_DARWIN)
find_program (LLD_PATH NAMES "ld")
endif ()
endif ()
endif ()
if (OS_LINUX)
if (LLD_PATH)
if (OS_LINUX OR OS_DARWIN)
if (COMPILER_CLANG)
# Clang driver simply allows full linker path.
set (LINKER_NAME ${LLD_PATH})
@ -75,9 +75,11 @@ endif ()
if (LINKER_NAME)
message(STATUS "Using linker: ${LINKER_NAME}")
else()
elseif (NOT ARCH_S390X AND NOT OS_FREEBSD)
message (FATAL_ERROR "The only supported linker is LLVM's LLD, but we cannot find it.")
else ()
message(STATUS "Using linker: <default>")
endif()
endif ()
# Archiver

2
contrib/AMQP-CPP vendored

@ -1 +1 @@
Subproject commit 818c2d8ad96a08a5d20fece7d1e1e8855a2b0860
Subproject commit 00f09897ce020a84e38f87dc416af4a19c5da9ae

View File

@ -1,16 +1,7 @@
#"${folder}/CMakeLists.txt" Third-party libraries may have substandard code.
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w")
if (WITH_COVERAGE)
set (WITHOUT_COVERAGE_LIST ${WITHOUT_COVERAGE})
separate_arguments(WITHOUT_COVERAGE_LIST)
# disable coverage for contib files and build with optimisations
if (COMPILER_CLANG)
add_compile_options(-O3 -DNDEBUG -finline-functions -finline-hint-functions ${WITHOUT_COVERAGE_LIST})
endif()
endif()
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w -ffunction-sections -fdata-sections")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w -ffunction-sections -fdata-sections")
if (SANITIZE STREQUAL "undefined")
# 3rd-party libraries usually not intended to work with UBSan.
@ -212,6 +203,8 @@ add_contrib (libbcrypt-cmake libbcrypt)
add_contrib (google-benchmark-cmake google-benchmark)
add_contrib (ulid-c-cmake ulid-c)
add_contrib (libssh-cmake libssh)
# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
# in "contrib/..." as originally planned, so we workaround this by fixing FOLDER properties of all targets manually,

2
contrib/NuRaft vendored

@ -1 +1 @@
Subproject commit eb1572129c71beb2156dcdaadc3fb136954aed96
Subproject commit b7ea89b817a18dc0eafc1f909d568869f02d2d04

2
contrib/abseil-cpp vendored

@ -1 +1 @@
Subproject commit 5655528c41830f733160de4fb0b99073841bae9e
Subproject commit 3bd86026c93da5a40006fd53403dff9d5f5e30e3

2
contrib/arrow vendored

@ -1 +1 @@
Subproject commit 1d93838f69a802639ca144ea5704a98e2481810d
Subproject commit ba5c67934e8274d649befcffab56731632dc5253

View File

@ -109,7 +109,6 @@ set (ORC_CXX_HAS_CSTDINT 1)
set (ORC_CXX_HAS_THREAD_LOCAL 1)
include(orc_check.cmake)
configure_file("${ORC_INCLUDE_DIR}/orc/orc-config.hh.in" "${ORC_BUILD_INCLUDE_DIR}/orc/orc-config.hh")
configure_file("${ORC_SOURCE_SRC_DIR}/Adaptor.hh.in" "${ORC_BUILD_INCLUDE_DIR}/Adaptor.hh")
@ -198,7 +197,9 @@ target_link_libraries(_orc PRIVATE
ch_contrib::snappy
ch_contrib::zlib
ch_contrib::zstd)
target_include_directories(_orc SYSTEM BEFORE PUBLIC ${ORC_INCLUDE_DIR})
target_include_directories(_orc SYSTEM BEFORE PUBLIC
${ORC_INCLUDE_DIR}
"${ClickHouse_SOURCE_DIR}/contrib/arrow-cmake/cpp/src/orc/c++/include")
target_include_directories(_orc SYSTEM BEFORE PUBLIC ${ORC_BUILD_INCLUDE_DIR})
target_include_directories(_orc SYSTEM PRIVATE
${ORC_SOURCE_SRC_DIR}
@ -212,8 +213,6 @@ target_include_directories(_orc SYSTEM PRIVATE
set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src/arrow")
configure_file("${LIBRARY_DIR}/util/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/cpp/src/arrow/util/config.h")
# arrow/cpp/src/arrow/CMakeLists.txt (ARROW_SRCS + ARROW_COMPUTE + ARROW_IPC)
set(ARROW_SRCS
"${LIBRARY_DIR}/array/array_base.cc"
@ -230,6 +229,8 @@ set(ARROW_SRCS
"${LIBRARY_DIR}/array/builder_nested.cc"
"${LIBRARY_DIR}/array/builder_primitive.cc"
"${LIBRARY_DIR}/array/builder_union.cc"
"${LIBRARY_DIR}/array/builder_run_end.cc"
"${LIBRARY_DIR}/array/array_run_end.cc"
"${LIBRARY_DIR}/array/concatenate.cc"
"${LIBRARY_DIR}/array/data.cc"
"${LIBRARY_DIR}/array/diff.cc"
@ -309,9 +310,12 @@ set(ARROW_SRCS
"${LIBRARY_DIR}/util/debug.cc"
"${LIBRARY_DIR}/util/tracing.cc"
"${LIBRARY_DIR}/util/atfork_internal.cc"
"${LIBRARY_DIR}/util/crc32.cc"
"${LIBRARY_DIR}/util/hashing.cc"
"${LIBRARY_DIR}/util/ree_util.cc"
"${LIBRARY_DIR}/util/union_util.cc"
"${LIBRARY_DIR}/vendored/base64.cpp"
"${LIBRARY_DIR}/vendored/datetime/tz.cpp"
"${LIBRARY_DIR}/vendored/musl/strptime.c"
"${LIBRARY_DIR}/vendored/uriparser/UriCommon.c"
"${LIBRARY_DIR}/vendored/uriparser/UriCompare.c"
@ -328,39 +332,20 @@ set(ARROW_SRCS
"${LIBRARY_DIR}/vendored/uriparser/UriRecompose.c"
"${LIBRARY_DIR}/vendored/uriparser/UriResolve.c"
"${LIBRARY_DIR}/vendored/uriparser/UriShorten.c"
"${LIBRARY_DIR}/vendored/double-conversion/bignum.cc"
"${LIBRARY_DIR}/vendored/double-conversion/bignum-dtoa.cc"
"${LIBRARY_DIR}/vendored/double-conversion/cached-powers.cc"
"${LIBRARY_DIR}/vendored/double-conversion/double-to-string.cc"
"${LIBRARY_DIR}/vendored/double-conversion/fast-dtoa.cc"
"${LIBRARY_DIR}/vendored/double-conversion/fixed-dtoa.cc"
"${LIBRARY_DIR}/vendored/double-conversion/string-to-double.cc"
"${LIBRARY_DIR}/vendored/double-conversion/strtod.cc"
"${LIBRARY_DIR}/compute/api_aggregate.cc"
"${LIBRARY_DIR}/compute/api_scalar.cc"
"${LIBRARY_DIR}/compute/api_vector.cc"
"${LIBRARY_DIR}/compute/cast.cc"
"${LIBRARY_DIR}/compute/exec.cc"
"${LIBRARY_DIR}/compute/exec/accumulation_queue.cc"
"${LIBRARY_DIR}/compute/exec/accumulation_queue.h"
"${LIBRARY_DIR}/compute/exec/aggregate.cc"
"${LIBRARY_DIR}/compute/exec/aggregate_node.cc"
"${LIBRARY_DIR}/compute/exec/asof_join_node.cc"
"${LIBRARY_DIR}/compute/exec/bloom_filter.cc"
"${LIBRARY_DIR}/compute/exec/exec_plan.cc"
"${LIBRARY_DIR}/compute/exec/expression.cc"
"${LIBRARY_DIR}/compute/exec/filter_node.cc"
"${LIBRARY_DIR}/compute/exec/hash_join.cc"
"${LIBRARY_DIR}/compute/exec/hash_join_dict.cc"
"${LIBRARY_DIR}/compute/exec/hash_join_node.cc"
"${LIBRARY_DIR}/compute/exec/key_hash.cc"
"${LIBRARY_DIR}/compute/exec/key_map.cc"
"${LIBRARY_DIR}/compute/exec/map_node.cc"
"${LIBRARY_DIR}/compute/exec/options.cc"
"${LIBRARY_DIR}/compute/exec/order_by_impl.cc"
"${LIBRARY_DIR}/compute/exec/partition_util.cc"
"${LIBRARY_DIR}/compute/exec/project_node.cc"
"${LIBRARY_DIR}/compute/exec/query_context.cc"
"${LIBRARY_DIR}/compute/exec/sink_node.cc"
"${LIBRARY_DIR}/compute/exec/source_node.cc"
"${LIBRARY_DIR}/compute/exec/swiss_join.cc"
"${LIBRARY_DIR}/compute/exec/task_util.cc"
"${LIBRARY_DIR}/compute/exec/tpch_node.cc"
"${LIBRARY_DIR}/compute/exec/union_node.cc"
"${LIBRARY_DIR}/compute/exec/util.cc"
"${LIBRARY_DIR}/compute/function.cc"
"${LIBRARY_DIR}/compute/function_internal.cc"
"${LIBRARY_DIR}/compute/kernel.cc"
@ -403,8 +388,13 @@ set(ARROW_SRCS
"${LIBRARY_DIR}/compute/kernels/vector_select_k.cc"
"${LIBRARY_DIR}/compute/kernels/vector_selection.cc"
"${LIBRARY_DIR}/compute/kernels/vector_sort.cc"
"${LIBRARY_DIR}/compute/kernels/vector_selection_internal.cc"
"${LIBRARY_DIR}/compute/kernels/vector_selection_filter_internal.cc"
"${LIBRARY_DIR}/compute/kernels/vector_selection_take_internal.cc"
"${LIBRARY_DIR}/compute/light_array.cc"
"${LIBRARY_DIR}/compute/registry.cc"
"${LIBRARY_DIR}/compute/expression.cc"
"${LIBRARY_DIR}/compute/ordering.cc"
"${LIBRARY_DIR}/compute/row/compare_internal.cc"
"${LIBRARY_DIR}/compute/row/encode_internal.cc"
"${LIBRARY_DIR}/compute/row/grouper.cc"
@ -459,7 +449,7 @@ target_link_libraries(_arrow PUBLIC _orc)
add_dependencies(_arrow protoc)
target_include_directories(_arrow SYSTEM BEFORE PUBLIC ${ARROW_SRC_DIR})
target_include_directories(_arrow SYSTEM BEFORE PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/cpp/src")
target_include_directories(_arrow SYSTEM BEFORE PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow-cmake/cpp/src")
target_include_directories(_arrow SYSTEM PRIVATE ${ARROW_SRC_DIR})
target_include_directories(_arrow SYSTEM PRIVATE ${HDFS_INCLUDE_DIR})
@ -488,10 +478,10 @@ set(PARQUET_SRCS
"${LIBRARY_DIR}/exception.cc"
"${LIBRARY_DIR}/file_reader.cc"
"${LIBRARY_DIR}/file_writer.cc"
"${LIBRARY_DIR}/page_index.cc"
"${LIBRARY_DIR}/level_conversion.cc"
"${LIBRARY_DIR}/level_comparison.cc"
"${LIBRARY_DIR}/metadata.cc"
"${LIBRARY_DIR}/murmur3.cc"
"${LIBRARY_DIR}/platform.cc"
"${LIBRARY_DIR}/printer.cc"
"${LIBRARY_DIR}/properties.cc"
@ -500,6 +490,8 @@ set(PARQUET_SRCS
"${LIBRARY_DIR}/stream_reader.cc"
"${LIBRARY_DIR}/stream_writer.cc"
"${LIBRARY_DIR}/types.cc"
"${LIBRARY_DIR}/bloom_filter_reader.cc"
"${LIBRARY_DIR}/xxhasher.cc"
"${GEN_LIBRARY_DIR}/parquet_constants.cpp"
"${GEN_LIBRARY_DIR}/parquet_types.cpp"

View File

@ -1 +0,0 @@
../../../thrift/build/cmake/config.h.in

View File

@ -0,0 +1,61 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#define ARROW_VERSION_MAJOR 11
#define ARROW_VERSION_MINOR 0
#define ARROW_VERSION_PATCH 0
#define ARROW_VERSION ((ARROW_VERSION_MAJOR * 1000) + ARROW_VERSION_MINOR) * 1000 + ARROW_VERSION_PATCH
#define ARROW_VERSION_STRING "11.0.0"
#define ARROW_SO_VERSION "1100"
#define ARROW_FULL_SO_VERSION "1100.0.0"
#define ARROW_CXX_COMPILER_ID "Clang"
#define ARROW_CXX_COMPILER_VERSION "ClickHouse"
#define ARROW_CXX_COMPILER_FLAGS ""
#define ARROW_BUILD_TYPE ""
#define ARROW_GIT_ID ""
#define ARROW_GIT_DESCRIPTION ""
#define ARROW_PACKAGE_KIND ""
/* #undef ARROW_COMPUTE */
/* #undef ARROW_CSV */
/* #undef ARROW_CUDA */
/* #undef ARROW_DATASET */
/* #undef ARROW_FILESYSTEM */
/* #undef ARROW_FLIGHT */
/* #undef ARROW_FLIGHT_SQL */
/* #undef ARROW_IPC */
/* #undef ARROW_JEMALLOC */
/* #undef ARROW_JEMALLOC_VENDORED */
/* #undef ARROW_JSON */
/* #undef ARROW_ORC */
/* #undef ARROW_PARQUET */
/* #undef ARROW_SUBSTRAIT */
/* #undef ARROW_GCS */
/* #undef ARROW_S3 */
/* #undef ARROW_USE_NATIVE_INT128 */
/* #undef ARROW_WITH_MUSL */
/* #undef ARROW_WITH_OPENTELEMETRY */
/* #undef ARROW_WITH_UCX */
/* #undef GRPCPP_PP_INCLUDE */

View File

@ -0,0 +1,38 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ORC_CONFIG_HH
#define ORC_CONFIG_HH
#define ORC_VERSION ""
#define ORC_CXX_HAS_CSTDINT
#ifdef ORC_CXX_HAS_CSTDINT
#include <cstdint>
#else
#include <stdint.h>
#endif
// Following MACROS should be keeped for backward compatibility.
#define ORC_NOEXCEPT noexcept
#define ORC_NULLPTR nullptr
#define ORC_OVERRIDE override
#define ORC_UNIQUE_PTR std::unique_ptr
#endif

2
contrib/avro vendored

@ -1 +1 @@
Subproject commit 7832659ec986075d560f930c288e973c64679552
Subproject commit 2fb8a8a6ec0eab9109b68abf3b4857e8c476b918

View File

@ -1,114 +1,13 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckCSourceRuns)
option(USE_CPU_EXTENSIONS "Whenever possible, use functions optimized for CPUs with specific extensions (ex: SSE, AVX)." ON)
# In the current (11/2/21) state of mingw64, the packaged gcc is not capable of emitting properly aligned avx2 instructions under certain circumstances.
# This leads to crashes for windows builds using mingw64 when invoking the avx2-enabled versions of certain functions. Until we can find a better
# work-around, disable avx2 (and all other extensions) in mingw builds.
#
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412
#
if (MINGW)
message(STATUS "MINGW detected! Disabling avx2 and other CPU extensions")
set(USE_CPU_EXTENSIONS OFF)
endif()
if (ARCH_AMD64)
set (AWS_ARCH_INTEL 1)
elseif (ARCH_AARCH64)
set (AWS_ARCH_ARM64 1)
endif ()
if(NOT CMAKE_CROSSCOMPILING)
check_c_source_runs("
#include <stdbool.h>
bool foo(int a, int b, int *c) {
return __builtin_mul_overflow(a, b, c);
}
int main() {
int out;
if (foo(1, 2, &out)) {
return 0;
}
return 0;
}" AWS_HAVE_GCC_OVERFLOW_MATH_EXTENSIONS)
if (USE_CPU_EXTENSIONS)
check_c_source_runs("
int main() {
int foo = 42;
_mulx_u32(1, 2, &foo);
return foo != 2;
}" AWS_HAVE_MSVC_MULX)
endif()
endif()
check_c_source_compiles("
#include <Windows.h>
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
int main() {
return 0;
}
#else
it's not windows desktop
#endif
" AWS_HAVE_WINAPI_DESKTOP)
check_c_source_compiles("
int main() {
#if !(defined(__x86_64__) || defined(__i386__) || defined(_M_X64) || defined(_M_IX86))
# error \"not intel\"
#endif
return 0;
}
" AWS_ARCH_INTEL)
check_c_source_compiles("
int main() {
#if !(defined(__aarch64__) || defined(_M_ARM64))
# error \"not arm64\"
#endif
return 0;
}
" AWS_ARCH_ARM64)
check_c_source_compiles("
int main() {
#if !(defined(__arm__) || defined(_M_ARM))
# error \"not arm\"
#endif
return 0;
}
" AWS_ARCH_ARM32)
check_c_source_compiles("
int main() {
int foo = 42, bar = 24;
__asm__ __volatile__(\"\":\"=r\"(foo):\"r\"(bar):\"memory\");
}" AWS_HAVE_GCC_INLINE_ASM)
check_c_source_compiles("
#include <sys/auxv.h>
int main() {
#ifdef __linux__
getauxval(AT_HWCAP);
getauxval(AT_HWCAP2);
#endif
return 0;
}" AWS_HAVE_AUXV)
string(REGEX MATCH "^(aarch64|arm)" ARM_CPU "${CMAKE_SYSTEM_PROCESSOR}")
if(NOT LEGACY_COMPILER_SUPPORT OR ARM_CPU)
check_c_source_compiles("
#include <execinfo.h>
int main() {
backtrace(NULL, 0);
return 0;
}" AWS_HAVE_EXECINFO)
endif()
check_c_source_compiles("
#include <linux/if_link.h>
int main() {
return 1;
}" AWS_HAVE_LINUX_IF_LINK_H)
set (AWS_HAVE_GCC_INLINE_ASM 1)
set (AWS_HAVE_AUXV 1)

View File

@ -1,54 +1,13 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckCCompilerFlag)
include(CheckIncludeFile)
if (USE_CPU_EXTENSIONS)
if (MSVC)
check_c_compiler_flag("/arch:AVX2" HAVE_M_AVX2_FLAG)
if (HAVE_M_AVX2_FLAG)
set(AVX2_CFLAGS "/arch:AVX2")
if (HAVE_AVX2)
set (AVX2_CFLAGS "-mavx -mavx2")
set (HAVE_AVX2_INTRINSICS 1)
set (HAVE_MM256_EXTRACT_EPI64 1)
endif()
else()
check_c_compiler_flag(-mavx2 HAVE_M_AVX2_FLAG)
if (HAVE_M_AVX2_FLAG)
set(AVX2_CFLAGS "-mavx -mavx2")
endif()
endif()
cmake_push_check_state()
set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${AVX2_CFLAGS}")
check_c_source_compiles("
#include <immintrin.h>
#include <emmintrin.h>
#include <string.h>
int main() {
__m256i vec;
memset(&vec, 0, sizeof(vec));
_mm256_shuffle_epi8(vec, vec);
_mm256_set_epi32(1,2,3,4,5,6,7,8);
_mm256_permutevar8x32_epi32(vec, vec);
return 0;
}" HAVE_AVX2_INTRINSICS)
check_c_source_compiles("
#include <immintrin.h>
#include <string.h>
int main() {
__m256i vec;
memset(&vec, 0, sizeof(vec));
return (int)_mm256_extract_epi64(vec, 2);
}" HAVE_MM256_EXTRACT_EPI64)
cmake_pop_check_state()
endif() # USE_CPU_EXTENSIONS
endif()
macro(simd_add_definition_if target definition)
if(${definition})

View File

@ -1,50 +1,9 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckSymbolExists)
# Check if the platform supports setting thread affinity
# (important for hitting full NIC entitlement on NUMA architectures)
function(aws_set_thread_affinity_method target)
# Non-POSIX, Android, and Apple platforms do not support thread affinity.
if (NOT UNIX OR ANDROID OR APPLE)
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
return()
endif()
cmake_push_check_state()
list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
set(headers "pthread.h")
# BSDs put nonportable pthread declarations in a separate header.
if(CMAKE_SYSTEM_NAME MATCHES BSD)
set(headers "${headers};pthread_np.h")
endif()
# Using pthread attrs is the preferred method, but is glibc-specific.
check_symbol_exists(pthread_attr_setaffinity_np "${headers}" USE_PTHREAD_ATTR_SETAFFINITY)
if (USE_PTHREAD_ATTR_SETAFFINITY)
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD_ATTR)
return()
endif()
# This method is still nonportable, but is supported by musl and BSDs.
check_symbol_exists(pthread_setaffinity_np "${headers}" USE_PTHREAD_SETAFFINITY)
if (USE_PTHREAD_SETAFFINITY)
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD)
return()
endif()
# If we got here, we expected thread affinity support but didn't find it.
# We still build with degraded NUMA performance, but show a warning.
message(WARNING "No supported method for setting thread affinity")
target_compile_definitions(${target} PRIVATE
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
cmake_pop_check_state()
# The detection code above has been cut; thread affinity is simply disabled for this build.
target_compile_definitions(${target} PRIVATE -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
endfunction()
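For reference, pthread_attr_setaffinity_np (the glibc-specific call probed above) pins a thread to a CPU set before it starts. A minimal sketch of that usage, assuming Linux/glibc with _GNU_SOURCE; the helper name start_pinned is illustrative:

    #define _GNU_SOURCE
    #include <pthread.h>
    #include <sched.h>

    /* Create a thread that starts pinned to CPU 0 via its attributes. */
    static int start_pinned(pthread_t *tid, void *(*fn)(void *), void *arg)
    {
        cpu_set_t cpus;
        pthread_attr_t attr;
        CPU_ZERO(&cpus);
        CPU_SET(0, &cpus);
        pthread_attr_init(&attr);
        pthread_attr_setaffinity_np(&attr, sizeof(cpus), &cpus);
        int rc = pthread_create(tid, &attr, fn, arg);
        pthread_attr_destroy(&attr);
        return rc;
    }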

View File

@ -1,61 +1,13 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
include(CheckSymbolExists)
# Check how the platform supports setting thread name
function(aws_set_thread_name_method target)
if (WINDOWS)
# On Windows we do a runtime check, instead of compile-time check
return()
elseif (APPLE)
if (APPLE)
# All Apple platforms we support have the same function, so no need for compile-time check.
return()
endif()
cmake_push_check_state()
list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
# The start of the test program
set(c_source_start "
#define _GNU_SOURCE
#include <pthread.h>
#if defined(__FreeBSD__) || defined(__NETBSD__)
#include <pthread_np.h>
#endif
int main() {
pthread_t thread_id;
")
# The end of the test program
set(c_source_end "}")
# pthread_setname_np() usually takes 2 args
check_c_source_compiles("
${c_source_start}
pthread_setname_np(thread_id, \"asdf\");
${c_source_end}"
PTHREAD_SETNAME_TAKES_2ARGS)
if (PTHREAD_SETNAME_TAKES_2ARGS)
target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS)
return()
endif()
# But on NetBSD it takes 3!
check_c_source_compiles("
${c_source_start}
pthread_setname_np(thread_id, \"asdf\", NULL);
${c_source_end}
" PTHREAD_SETNAME_TAKES_3ARGS)
if (PTHREAD_SETNAME_TAKES_3ARGS)
target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_3ARGS)
return()
endif()
# And on many older/weirder platforms it's just not supported
cmake_pop_check_state()
endfunction()
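A caller can then dispatch on the AWS_PTHREAD_SETNAME_TAKES_* definitions set above; a minimal sketch (the helper name set_thread_name is illustrative, and name is assumed to contain no printf format specifiers):

    #include <pthread.h>

    static void set_thread_name(pthread_t tid, const char *name)
    {
    #if defined(AWS_PTHREAD_SETNAME_TAKES_2ARGS)
        pthread_setname_np(tid, name);        /* two-argument form, e.g. glibc */
    #elif defined(AWS_PTHREAD_SETNAME_TAKES_3ARGS)
        pthread_setname_np(tid, name, NULL);  /* NetBSD form takes a third argument */
    #else
        (void)tid; (void)name;                /* no supported method on this platform */
    #endif
    }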

2
contrib/croaring vendored

@ -1 +1 @@
Subproject commit f40ed52bcdd635840a79877cef4857315dba817c
Subproject commit 9b7cc0ff1c41e9457efb6228cfd2c538d0155303

View File

@ -2,23 +2,25 @@ set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/croaring")
set(SRCS
"${LIBRARY_DIR}/src/array_util.c"
"${LIBRARY_DIR}/src/bitset.c"
"${LIBRARY_DIR}/src/bitset_util.c"
"${LIBRARY_DIR}/src/isadetection.c"
"${LIBRARY_DIR}/src/memory.c"
"${LIBRARY_DIR}/src/roaring.c"
"${LIBRARY_DIR}/src/roaring_array.c"
"${LIBRARY_DIR}/src/roaring_priority_queue.c"
"${LIBRARY_DIR}/src/containers/array.c"
"${LIBRARY_DIR}/src/containers/bitset.c"
"${LIBRARY_DIR}/src/containers/containers.c"
"${LIBRARY_DIR}/src/containers/convert.c"
"${LIBRARY_DIR}/src/containers/mixed_intersection.c"
"${LIBRARY_DIR}/src/containers/mixed_union.c"
"${LIBRARY_DIR}/src/containers/mixed_equal.c"
"${LIBRARY_DIR}/src/containers/mixed_subset.c"
"${LIBRARY_DIR}/src/containers/mixed_negation.c"
"${LIBRARY_DIR}/src/containers/mixed_xor.c"
"${LIBRARY_DIR}/src/containers/mixed_andnot.c"
"${LIBRARY_DIR}/src/containers/run.c"
"${LIBRARY_DIR}/src/roaring.c"
"${LIBRARY_DIR}/src/roaring_priority_queue.c"
"${LIBRARY_DIR}/src/roaring_array.c"
"${LIBRARY_DIR}/src/memory.c")
"${LIBRARY_DIR}/src/containers/mixed_equal.c"
"${LIBRARY_DIR}/src/containers/mixed_intersection.c"
"${LIBRARY_DIR}/src/containers/mixed_negation.c"
"${LIBRARY_DIR}/src/containers/mixed_subset.c"
"${LIBRARY_DIR}/src/containers/mixed_union.c"
"${LIBRARY_DIR}/src/containers/mixed_xor.c"
"${LIBRARY_DIR}/src/containers/run.c")
add_library(_roaring ${SRCS})

2
contrib/curl vendored

@ -1 +1 @@
Subproject commit eb3b049df526bf125eda23218e680ce7fa9ec46c
Subproject commit d755a5f7c009dd63a61b2c745180d8ba937cbfeb

View File

@ -64,6 +64,7 @@ set (SRCS
"${LIBRARY_DIR}/lib/hostsyn.c"
"${LIBRARY_DIR}/lib/hsts.c"
"${LIBRARY_DIR}/lib/http.c"
"${LIBRARY_DIR}/lib/http1.c"
"${LIBRARY_DIR}/lib/http2.c"
"${LIBRARY_DIR}/lib/http_aws_sigv4.c"
"${LIBRARY_DIR}/lib/http_chunks.c"

@ -1 +1 @@
Subproject commit 7abd49bb2e72bf9a5029993d31dcb1872da88292
Subproject commit c3abaaefe5fa400eed99e082af07c1b61a7144db

@ -1 +1 @@
Subproject commit c47efe2d8f6a60022b49ecd6cc23660687c8598f
Subproject commit 0862007f6ca1f5723c58f10f0ca34f3f25a63b2e

View File

@ -20,7 +20,6 @@ endif()
set(protobuf_source_dir "${ClickHouse_SOURCE_DIR}/contrib/google-protobuf")
set(protobuf_binary_dir "${ClickHouse_BINARY_DIR}/contrib/google-protobuf")
add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD)
add_definitions(-DHAVE_PTHREAD)
@ -30,18 +29,69 @@ include_directories(
${protobuf_binary_dir}
${protobuf_source_dir}/src)
add_library(utf8_range
${protobuf_source_dir}/third_party/utf8_range/naive.c
${protobuf_source_dir}/third_party/utf8_range/range2-neon.c
${protobuf_source_dir}/third_party/utf8_range/range2-sse.c
)
include_directories(${protobuf_source_dir}/third_party/utf8_range)
add_library(utf8_validity
${protobuf_source_dir}/third_party/utf8_range/utf8_validity.cc
)
target_link_libraries(utf8_validity PUBLIC absl::strings)
set(protobuf_absl_used_targets
absl::absl_check
absl::absl_log
absl::algorithm
absl::base
absl::bind_front
absl::bits
absl::btree
absl::cleanup
absl::cord
absl::core_headers
absl::debugging
absl::die_if_null
absl::dynamic_annotations
absl::flags
absl::flat_hash_map
absl::flat_hash_set
absl::function_ref
absl::hash
absl::layout
absl::log_initialize
absl::log_severity
absl::memory
absl::node_hash_map
absl::node_hash_set
absl::optional
absl::span
absl::status
absl::statusor
absl::strings
absl::synchronization
absl::time
absl::type_traits
absl::utility
absl::variant
)
set(libprotobuf_lite_files
${protobuf_source_dir}/src/google/protobuf/any_lite.cc
${protobuf_source_dir}/src/google/protobuf/arena.cc
${protobuf_source_dir}/src/google/protobuf/arena_align.cc
${protobuf_source_dir}/src/google/protobuf/arenastring.cc
${protobuf_source_dir}/src/google/protobuf/arenaz_sampler.cc
${protobuf_source_dir}/src/google/protobuf/extension_set.cc
${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven_lite.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_lite.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc
${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc
${protobuf_source_dir}/src/google/protobuf/inlined_string_field.cc
${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc
${protobuf_source_dir}/src/google/protobuf/io/strtod.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
@ -49,21 +99,15 @@ set(libprotobuf_lite_files
${protobuf_source_dir}/src/google/protobuf/message_lite.cc
${protobuf_source_dir}/src/google/protobuf/parse_context.cc
${protobuf_source_dir}/src/google/protobuf/repeated_field.cc
${protobuf_source_dir}/src/google/protobuf/stubs/bytestream.cc
${protobuf_source_dir}/src/google/protobuf/repeated_ptr_field.cc
${protobuf_source_dir}/src/google/protobuf/stubs/common.cc
${protobuf_source_dir}/src/google/protobuf/stubs/int128.cc
${protobuf_source_dir}/src/google/protobuf/stubs/status.cc
${protobuf_source_dir}/src/google/protobuf/stubs/statusor.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf.cc
${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid.cc
${protobuf_source_dir}/src/google/protobuf/stubs/strutil.cc
${protobuf_source_dir}/src/google/protobuf/stubs/time.cc
${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
)
add_library(_libprotobuf-lite ${libprotobuf_lite_files})
target_link_libraries(_libprotobuf-lite pthread)
target_link_libraries(_libprotobuf-lite
pthread
utf8_validity)
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(_libprotobuf-lite log)
endif()
@ -72,67 +116,93 @@ add_library(protobuf::libprotobuf-lite ALIAS _libprotobuf-lite)
set(libprotobuf_files
${protobuf_source_dir}/src/google/protobuf/any.cc
${protobuf_source_dir}/src/google/protobuf/any.pb.cc
${protobuf_source_dir}/src/google/protobuf/api.pb.cc
${protobuf_source_dir}/src/google/protobuf/duration.pb.cc
${protobuf_source_dir}/src/google/protobuf/empty.pb.cc
${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc
${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc
${protobuf_source_dir}/src/google/protobuf/struct.pb.cc
${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc
${protobuf_source_dir}/src/google/protobuf/type.pb.cc
${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
${protobuf_source_dir}/src/google/protobuf/any.cc
${protobuf_source_dir}/src/google/protobuf/any_lite.cc
${protobuf_source_dir}/src/google/protobuf/arena.cc
${protobuf_source_dir}/src/google/protobuf/arena_align.cc
${protobuf_source_dir}/src/google/protobuf/arenastring.cc
${protobuf_source_dir}/src/google/protobuf/arenaz_sampler.cc
${protobuf_source_dir}/src/google/protobuf/compiler/importer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/parser.cc
${protobuf_source_dir}/src/google/protobuf/cpp_features.pb.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.pb.cc
${protobuf_source_dir}/src/google/protobuf/descriptor_database.cc
${protobuf_source_dir}/src/google/protobuf/duration.pb.cc
${protobuf_source_dir}/src/google/protobuf/dynamic_message.cc
${protobuf_source_dir}/src/google/protobuf/empty.pb.cc
${protobuf_source_dir}/src/google/protobuf/extension_set.cc
${protobuf_source_dir}/src/google/protobuf/extension_set_heavy.cc
${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc
${protobuf_source_dir}/src/google/protobuf/feature_resolver.cc
${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_bases.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_reflection.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_full.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_gen.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_lite.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc
${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc
${protobuf_source_dir}/src/google/protobuf/inlined_string_field.cc
${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/gzip_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc
${protobuf_source_dir}/src/google/protobuf/io/printer.cc
${protobuf_source_dir}/src/google/protobuf/io/strtod.cc
${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_sink.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
${protobuf_source_dir}/src/google/protobuf/json/internal/lexer.cc
${protobuf_source_dir}/src/google/protobuf/json/internal/message_path.cc
${protobuf_source_dir}/src/google/protobuf/json/internal/parser.cc
${protobuf_source_dir}/src/google/protobuf/json/internal/unparser.cc
${protobuf_source_dir}/src/google/protobuf/json/internal/untyped_message.cc
${protobuf_source_dir}/src/google/protobuf/json/internal/writer.cc
${protobuf_source_dir}/src/google/protobuf/json/internal/zero_copy_buffered_stream.cc
${protobuf_source_dir}/src/google/protobuf/json/json.cc
${protobuf_source_dir}/src/google/protobuf/map.cc
${protobuf_source_dir}/src/google/protobuf/map_field.cc
${protobuf_source_dir}/src/google/protobuf/message.cc
${protobuf_source_dir}/src/google/protobuf/message_lite.cc
${protobuf_source_dir}/src/google/protobuf/parse_context.cc
${protobuf_source_dir}/src/google/protobuf/port.cc
${protobuf_source_dir}/src/google/protobuf/raw_ptr.cc
${protobuf_source_dir}/src/google/protobuf/reflection_mode.cc
${protobuf_source_dir}/src/google/protobuf/reflection_ops.cc
${protobuf_source_dir}/src/google/protobuf/repeated_field.cc
${protobuf_source_dir}/src/google/protobuf/repeated_ptr_field.cc
${protobuf_source_dir}/src/google/protobuf/service.cc
${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc
${protobuf_source_dir}/src/google/protobuf/struct.pb.cc
${protobuf_source_dir}/src/google/protobuf/stubs/substitute.cc
${protobuf_source_dir}/src/google/protobuf/stubs/common.cc
${protobuf_source_dir}/src/google/protobuf/text_format.cc
${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc
${protobuf_source_dir}/src/google/protobuf/type.pb.cc
${protobuf_source_dir}/src/google/protobuf/unknown_field_set.cc
${protobuf_source_dir}/src/google/protobuf/util/delimited_message_util.cc
${protobuf_source_dir}/src/google/protobuf/util/field_comparator.cc
${protobuf_source_dir}/src/google/protobuf/util/field_mask_util.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/datapiece.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/error_listener.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/field_mask_utility.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_escaping.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/utility.cc
${protobuf_source_dir}/src/google/protobuf/util/json_util.cc
${protobuf_source_dir}/src/google/protobuf/util/message_differencer.cc
${protobuf_source_dir}/src/google/protobuf/util/time_util.cc
${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util.cc
${protobuf_source_dir}/src/google/protobuf/wire_format.cc
${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
)
add_library(_libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
if (ENABLE_FUZZING)
target_compile_options(_libprotobuf PRIVATE "-fsanitize-recover=all")
endif()
target_link_libraries(_libprotobuf pthread)
target_link_libraries(_libprotobuf ch_contrib::zlib)
target_link_libraries(_libprotobuf
pthread
ch_contrib::zlib
utf8_validity
${protobuf_absl_used_targets})
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(_libprotobuf log)
endif()
@ -141,23 +211,26 @@ add_library(protobuf::libprotobuf ALIAS _libprotobuf)
set(libprotoc_files
${protobuf_source_dir}/src/google/protobuf/compiler/allowlists/editions.cc
${protobuf_source_dir}/src/google/protobuf/compiler/code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_padding_optimizer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_parse_function_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/cord_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/padding_optimizer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/parse_function_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/tracker.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum_field.cc
@ -174,60 +247,78 @@ set(libprotoc_files
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_context.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator_factory.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_kotlin_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_name_resolver.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_shared_code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/js_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/well_known_types_embed.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_oneof.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/names.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/context.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/enum_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/enum_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/extension_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/generator_factory.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/kotlin_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/map_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/message_builder.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/message_builder_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/message_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/message_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/message_serialization.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/name_resolver.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/names.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/primitive_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/shared_code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/string_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/import_writer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/line_consumer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/names.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/oneof.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/text_format_decode_data.cc
${protobuf_source_dir}/src/google/protobuf/compiler/php/names.cc
${protobuf_source_dir}/src/google/protobuf/compiler/php/php_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.pb.cc
${protobuf_source_dir}/src/google/protobuf/compiler/python/python_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/python/generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/python/helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/python/pyi_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/retention.cc
${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/rust/accessors/accessors.cc
${protobuf_source_dir}/src/google/protobuf/compiler/rust/accessors/singular_bytes.cc
${protobuf_source_dir}/src/google/protobuf/compiler/rust/accessors/singular_scalar.cc
${protobuf_source_dir}/src/google/protobuf/compiler/rust/context.cc
${protobuf_source_dir}/src/google/protobuf/compiler/rust/generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/rust/message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/rust/naming.cc
${protobuf_source_dir}/src/google/protobuf/compiler/subprocess.cc
${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc
)
add_library(_libprotoc ${libprotoc_files})
target_link_libraries(_libprotoc _libprotobuf)
target_link_libraries(_libprotoc
_libprotobuf
${protobuf_absl_used_targets})
add_library(protobuf::libprotoc ALIAS _libprotoc)
set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc)
@ -236,7 +327,11 @@ if (CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR)
add_executable(protoc ${protoc_files})
target_link_libraries(protoc _libprotoc _libprotobuf pthread)
target_link_libraries(protoc _libprotoc
_libprotobuf
pthread
utf8_validity
${protobuf_absl_used_targets})
add_executable(protobuf::protoc ALIAS protoc)
if (ENABLE_FUZZING)
@ -256,6 +351,8 @@ else ()
# This is quite ugly but I cannot make dependencies work properly.
set(abseil_source_dir "${ClickHouse_SOURCE_DIR}/contrib/abseil-cpp")
execute_process(
COMMAND mkdir -p ${PROTOC_BUILD_DIR}
COMMAND_ECHO STDOUT)
@ -270,7 +367,9 @@ else ()
"-Dprotobuf_BUILD_CONFORMANCE=0"
"-Dprotobuf_BUILD_EXAMPLES=0"
"-Dprotobuf_BUILD_PROTOC_BINARIES=1"
"${protobuf_source_dir}/cmake"
"-DABSL_ROOT_DIR=${abseil_source_dir}"
"-DABSL_ENABLE_INSTALL=0"
"${protobuf_source_dir}"
WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
COMMAND_ECHO STDOUT)
@ -279,38 +378,6 @@ else ()
COMMAND_ECHO STDOUT)
endif ()
# add_custom_command (
# OUTPUT ${PROTOC_BUILD_DIR}
# COMMAND mkdir -p ${PROTOC_BUILD_DIR})
#
# add_custom_command (
# OUTPUT "${PROTOC_BUILD_DIR}/CMakeCache.txt"
#
# COMMAND ${CMAKE_COMMAND}
# -G"${CMAKE_GENERATOR}"
# -DCMAKE_MAKE_PROGRAM="${CMAKE_MAKE_PROGRAM}"
# -DCMAKE_C_COMPILER="${CMAKE_C_COMPILER}"
# -DCMAKE_CXX_COMPILER="${CMAKE_CXX_COMPILER}"
# -Dprotobuf_BUILD_TESTS=0
# -Dprotobuf_BUILD_CONFORMANCE=0
# -Dprotobuf_BUILD_EXAMPLES=0
# -Dprotobuf_BUILD_PROTOC_BINARIES=1
# "${protobuf_source_dir}/cmake"
#
# DEPENDS "${PROTOC_BUILD_DIR}"
# WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
# COMMENT "Configuring 'protoc' for host architecture."
# USES_TERMINAL)
#
# add_custom_command (
# OUTPUT "${PROTOC_BUILD_DIR}/protoc"
# COMMAND ${CMAKE_COMMAND} --build "${PROTOC_BUILD_DIR}"
# DEPENDS "${PROTOC_BUILD_DIR}/CMakeCache.txt"
# COMMENT "Building 'protoc' for host architecture."
# USES_TERMINAL)
#
# add_custom_target (protoc-host DEPENDS "${PROTOC_BUILD_DIR}/protoc")
add_executable(protoc IMPORTED GLOBAL)
set_target_properties (protoc PROPERTIES IMPORTED_LOCATION "${PROTOC_BUILD_DIR}/protoc")
add_dependencies(protoc "${PROTOC_BUILD_DIR}/protoc")

2
contrib/googletest vendored

@ -1 +1 @@
Subproject commit 71140c3ca7a87bb1b5b9c9f1500fea8858cce344
Subproject commit e47544ad31cb3ceecd04cc13e8fe556f8df9fe0b

View File

@ -6,20 +6,14 @@ target_compile_definitions (_gtest PUBLIC GTEST_HAS_POSIX_RE=0)
target_include_directories(_gtest SYSTEM PUBLIC "${SRC_DIR}/googletest/include")
target_include_directories(_gtest PRIVATE "${SRC_DIR}/googletest")
add_library(_gtest_main "${SRC_DIR}/googletest/src/gtest_main.cc")
set_target_properties(_gtest_main PROPERTIES VERSION "1.0.0")
target_link_libraries(_gtest_main PUBLIC _gtest)
add_library(_gtest_all INTERFACE)
target_link_libraries(_gtest_all INTERFACE _gtest _gtest_main)
add_library(ch_contrib::gtest_all ALIAS _gtest_all)
add_library(ch_contrib::gtest ALIAS _gtest)
add_library(_gmock "${SRC_DIR}/googlemock/src/gmock-all.cc")
set_target_properties(_gmock PROPERTIES VERSION "1.0.0")
target_compile_definitions (_gmock PUBLIC GTEST_HAS_POSIX_RE=0)
target_include_directories(_gmock SYSTEM PUBLIC "${SRC_DIR}/googlemock/include" "${SRC_DIR}/googletest/include")
target_include_directories(_gmock PRIVATE "${SRC_DIR}/googlemock")
target_link_libraries(_gmock PUBLIC _gtest)
add_library(_gmock_main "${SRC_DIR}/googlemock/src/gmock_main.cc")
set_target_properties(_gmock_main PROPERTIES VERSION "1.0.0")

2
contrib/grpc vendored

@ -1 +1 @@
Subproject commit 3f975ecab377cd5f739af780566596128f17bb74
Subproject commit 740e3dfd97301a52ad8165b65285bcc149d9e817

View File

@ -3,4 +3,4 @@ It allows to integrate JEMalloc into CMake project.
- Remove JEMALLOC_HAVE_ATTR_FORMAT_GNU_PRINTF because it's non standard.
- Added JEMALLOC_CONFIG_MALLOC_CONF substitution
- Add musl support (USE_MUSL)
- Also note, that darwin build requires JEMALLOC_PREFIX, while others don not
- Also note, that darwin build requires JEMALLOC_PREFIX, while others do not

View File

@ -1,5 +1,3 @@
include(CheckCXXCompilerFlag)
set(LIBCXX_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/llvm-project/libcxx")
set(SRCS

2
contrib/libhdfs3 vendored

@ -1 +1 @@
Subproject commit 377220ef351ae24994a5fcd2b5fa3930d00c4db0
Subproject commit bdcb91354b1c05b21e73043a112a6f1e3b013497

View File

@ -1,4 +1,4 @@
if(NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE AND NOT ARCH_S390X)
if(NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE)
option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES})
elseif(ENABLE_HDFS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration")

1
contrib/libssh vendored Submodule

@ -0,0 +1 @@
Subproject commit 2c76332ef56d90f55965ab24da6b6dbcbef29c4c

View File

@ -0,0 +1,69 @@
option (ENABLE_SSH "Enable support for SSH keys and protocol" ON)
if (NOT ENABLE_SSH)
message(STATUS "Not using SSH")
return()
endif()
set(LIB_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libssh")
set(LIB_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/libssh")
project(libssh VERSION 0.9.7 LANGUAGES C)
# globally needed variable
set(APPLICATION_NAME ${PROJECT_NAME})
# SOVERSION scheme: CURRENT.AGE.REVISION
# If there was an incompatible interface change:
# Increment CURRENT. Set AGE and REVISION to 0
# If there was a compatible interface change:
# Increment AGE. Set REVISION to 0
# If the source code was changed, but there were no interface changes:
# Increment REVISION.
set(LIBRARY_VERSION "4.8.7")
set(LIBRARY_SOVERSION "4")
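# Worked example of the scheme above, starting from 4.8.7: an incompatible
# interface change gives 5.0.0, a compatible interface change gives 4.9.0,
# and a code-only change gives 4.8.8.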
# Copy library files to a lib sub-directory
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${LIB_BINARY_DIR}/lib")
set(CMAKE_THREAD_PREFER_PTHREADS ON)
set(THREADS_PREFER_PTHREAD_FLAG ON)
set(WITH_ZLIB OFF)
set(WITH_SYMBOL_VERSIONING OFF)
set(WITH_SERVER ON)
include(IncludeSources.cmake)
if (OS_LINUX)
if (ARCH_AMD64)
if (USE_MUSL)
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/x86-64-musl")
else()
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/x86-64")
endif ()
elseif (ARCH_AARCH64)
if (USE_MUSL)
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/aarch64-musl")
else()
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/aarch64")
endif ()
elseif (ARCH_PPC64LE)
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/ppc64le")
elseif (ARCH_S390X)
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/s390x")
elseif (ARCH_RISCV64)
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/linux/riscv64")
else ()
message(FATAL_ERROR "Platform is not supported")
endif ()
elseif (OS_DARWIN)
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/darwin")
elseif (OS_FREEBSD)
target_include_directories(_ssh PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libssh-cmake/freebsd")
else ()
message(FATAL_ERROR "Platform is not supported")
endif()
configure_file(${LIB_SOURCE_DIR}/include/libssh/libssh_version.h.cmake
${LIB_BINARY_DIR}/include/libssh/libssh_version.h
@ONLY)

View File

@ -0,0 +1,98 @@
set(LIBSSH_LINK_LIBRARIES
${LIBSSH_LINK_LIBRARIES}
OpenSSL::Crypto
)
set(libssh_SRCS
${LIB_SOURCE_DIR}/src/agent.c
${LIB_SOURCE_DIR}/src/auth.c
${LIB_SOURCE_DIR}/src/base64.c
${LIB_SOURCE_DIR}/src/bignum.c
${LIB_SOURCE_DIR}/src/buffer.c
${LIB_SOURCE_DIR}/src/callbacks.c
${LIB_SOURCE_DIR}/src/channels.c
${LIB_SOURCE_DIR}/src/client.c
${LIB_SOURCE_DIR}/src/config.c
${LIB_SOURCE_DIR}/src/connect.c
${LIB_SOURCE_DIR}/src/connector.c
${LIB_SOURCE_DIR}/src/curve25519.c
${LIB_SOURCE_DIR}/src/dh.c
${LIB_SOURCE_DIR}/src/ecdh.c
${LIB_SOURCE_DIR}/src/error.c
${LIB_SOURCE_DIR}/src/getpass.c
${LIB_SOURCE_DIR}/src/init.c
${LIB_SOURCE_DIR}/src/kdf.c
${LIB_SOURCE_DIR}/src/kex.c
${LIB_SOURCE_DIR}/src/known_hosts.c
${LIB_SOURCE_DIR}/src/knownhosts.c
${LIB_SOURCE_DIR}/src/legacy.c
${LIB_SOURCE_DIR}/src/log.c
${LIB_SOURCE_DIR}/src/match.c
${LIB_SOURCE_DIR}/src/messages.c
${LIB_SOURCE_DIR}/src/misc.c
${LIB_SOURCE_DIR}/src/options.c
${LIB_SOURCE_DIR}/src/packet.c
${LIB_SOURCE_DIR}/src/packet_cb.c
${LIB_SOURCE_DIR}/src/packet_crypt.c
${LIB_SOURCE_DIR}/src/pcap.c
${LIB_SOURCE_DIR}/src/pki.c
${LIB_SOURCE_DIR}/src/pki_container_openssh.c
${LIB_SOURCE_DIR}/src/poll.c
${LIB_SOURCE_DIR}/src/session.c
${LIB_SOURCE_DIR}/src/scp.c
${LIB_SOURCE_DIR}/src/socket.c
${LIB_SOURCE_DIR}/src/string.c
${LIB_SOURCE_DIR}/src/threads.c
${LIB_SOURCE_DIR}/src/wrapper.c
${LIB_SOURCE_DIR}/src/external/bcrypt_pbkdf.c
${LIB_SOURCE_DIR}/src/external/blowfish.c
${LIB_SOURCE_DIR}/src/external/chacha.c
${LIB_SOURCE_DIR}/src/external/poly1305.c
${LIB_SOURCE_DIR}/src/chachapoly.c
${LIB_SOURCE_DIR}/src/config_parser.c
${LIB_SOURCE_DIR}/src/token.c
${LIB_SOURCE_DIR}/src/pki_ed25519_common.c
)
set(libssh_SRCS
${libssh_SRCS}
${LIB_SOURCE_DIR}/src/threads/noop.c
${LIB_SOURCE_DIR}/src/threads/pthread.c
)
# LIBCRYPT specific
set(libssh_SRCS
${libssh_SRCS}
${LIB_SOURCE_DIR}/src/threads/libcrypto.c
${LIB_SOURCE_DIR}/src/pki_crypto.c
${LIB_SOURCE_DIR}/src/ecdh_crypto.c
${LIB_SOURCE_DIR}/src/libcrypto.c
${LIB_SOURCE_DIR}/src/dh_crypto.c
)
if (NOT (ENABLE_OPENSSL OR ENABLE_OPENSSL_DYNAMIC))
add_compile_definitions(USE_BORINGSSL=1)
endif()
set(libssh_SRCS
${libssh_SRCS}
${LIB_SOURCE_DIR}/src/options.c
${LIB_SOURCE_DIR}/src/server.c
${LIB_SOURCE_DIR}/src/bind.c
${LIB_SOURCE_DIR}/src/bind_config.c
)
add_library(_ssh STATIC ${libssh_SRCS})
target_include_directories(_ssh PRIVATE ${LIB_BINARY_DIR})
target_include_directories(_ssh PUBLIC "${LIB_SOURCE_DIR}/include" "${LIB_BINARY_DIR}/include")
target_link_libraries(_ssh
PRIVATE ${LIBSSH_LINK_LIBRARIES})
add_library(ch_contrib::ssh ALIAS _ssh)
target_compile_options(_ssh
PRIVATE
${DEFAULT_C_COMPILE_FLAGS}
-D_GNU_SOURCE)

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/darwin"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
#define HAVE_UTMP_H 1
/* Define to 1 if you have the <util.h> header file. */
#define HAVE_UTIL_H 1
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
#define HAVE_NTOHLL 1
/* Define to 1 if you have the `htonll' function. */
#define HAVE_HTONLL 1
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO */
/* Define to 1 if you have the `memset_s' function. */
#define HAVE_MEMSET_S 1
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/freebsd"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
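The generated headers above only record what the configure step detected; consuming code then dispatches on the HAVE_* macros at compile time. Below is a minimal illustrative sketch of how the HAVE_MEMSET_S / HAVE_EXPLICIT_BZERO / HAVE_SECURE_ZERO_MEMORY probes could drive a secure-wipe helper. It assumes a config.h like the ones in this commit; burn_secret is a hypothetical name, not libssh's actual internal API.

/* Illustrative sketch: pick the memory-wiping primitive that was detected. */
#include "config.h"                 /* one of the generated headers above */
#if defined(HAVE_MEMSET_S)
#  define __STDC_WANT_LIB_EXT1__ 1  /* memset_s lives in C11 Annex K */
#endif
#include <stddef.h>
#include <string.h>
#if defined(HAVE_SECURE_ZERO_MEMORY)
#  include <windows.h>
#endif

static void burn_secret(void *buf, size_t len)   /* hypothetical helper */
{
#if defined(HAVE_MEMSET_S)
    memset_s(buf, len, 0, len);
#elif defined(HAVE_EXPLICIT_BZERO)
    explicit_bzero(buf, len);                    /* glibc/musl/BSD */
#elif defined(HAVE_SECURE_ZERO_MEMORY)
    SecureZeroMemory(buf, len);                  /* Windows */
#else
    volatile unsigned char *p = buf;             /* portable last resort */
    while (len--)
        *p++ = 0;
#endif
}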

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/aarch64-musl"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
/* #undef HAVE_GLOB_GL_FLAGS_MEMBER */
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/aarch64"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1
/* Define to 1 if you have the `memset_s' function. */
#define HAVE_MEMSET_S 1
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/ppc64le"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO */
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/riscv64"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO */
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/s390x"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
/* #undef HAVE_EXPLICIT_BZERO */
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
#define WORDS_BIGENDIAN 1
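Only this s390x configuration sets WORDS_BIGENDIAN; together with the absent HAVE_NTOHLL / HAVE_HTONLL probes, that flag is the usual input to a 64-bit host-to-network helper. The sketch below is illustrative only; my_htonll is a hypothetical name and not part of the committed sources.

/* Illustrative sketch: 64-bit host-to-network conversion driven by the probes. */
#include "config.h"                 /* one of the generated headers above */
#include <stdint.h>
#if defined(HAVE_HTONLL) && defined(HAVE_ARPA_INET_H)
#  include <arpa/inet.h>            /* platforms that ship htonll */
#endif

static uint64_t my_htonll(uint64_t v)   /* hypothetical helper */
{
#if defined(HAVE_HTONLL)
    return htonll(v);               /* system-provided, if detected */
#elif defined(WORDS_BIGENDIAN)
    return v;                       /* big-endian host (s390x): already network order */
#else
    uint64_t r = 0;
    for (int i = 0; i < 8; ++i)     /* byte-reverse on little-endian hosts */
        r = (r << 8) | ((v >> (8 * i)) & 0xffu);
    return r;
#endif
}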

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/musl"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
/* #undef HAVE_GLOB_GL_FLAGS_MEMBER */
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1
/* Define to 1 if you have the `memset_s' function. */
/* #undef HAVE_MEMSET_S */
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */

View File

@ -0,0 +1,287 @@
/* Name of package */
#define PACKAGE "libssh"
/* Version number of package */
#define VERSION "0.9.7"
#define SYSCONFDIR "etc"
#define BINARYDIR "/home/ubuntu/workdir/ClickHouse/build/Debug"
#define SOURCEDIR "/home/ubuntu/workdir/ClickHouse"
/* Global bind configuration file path */
#define GLOBAL_BIND_CONFIG "/etc/ssh/libssh_server_config"
/* Global client configuration file path */
#define GLOBAL_CLIENT_CONFIG "/etc/ssh/ssh_config"
/************************** HEADER FILES *************************/
/* Define to 1 if you have the <argp.h> header file. */
/* #undef HAVE_ARGP_H */
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the <glob.h> header file. */
#define HAVE_GLOB_H 1
/* Define to 1 if you have the <valgrind/valgrind.h> header file. */
/* #undef HAVE_VALGRIND_VALGRIND_H */
/* Define to 1 if you have the <pty.h> header file. */
/* #undef HAVE_PTY_H */
/* Define to 1 if you have the <utmp.h> header file. */
/* #undef HAVE_UTMP_H */
/* Define to 1 if you have the <util.h> header file. */
/* #undef HAVE_UTIL_H */
/* Define to 1 if you have the <libutil.h> header file. */
/* #undef HAVE_LIBUTIL_H */
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/utime.h> header file. */
/* #undef HAVE_SYS_UTIME_H */
/* Define to 1 if you have the <io.h> header file. */
/* #undef HAVE_IO_H */
/* Define to 1 if you have the <termios.h> header file. */
#define HAVE_TERMIOS_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <openssl/aes.h> header file. */
#define HAVE_OPENSSL_AES_H 1
/* Define to 1 if you have the <wspiapi.h> header file. */
/* #undef HAVE_WSPIAPI_H */
/* Define to 1 if you have the <openssl/blowfish.h> header file. */
/* #undef HAVE_OPENSSL_BLOWFISH_H */
/* Define to 1 if you have the <openssl/des.h> header file. */
#define HAVE_OPENSSL_DES_H 1
/* Define to 1 if you have the <openssl/ecdh.h> header file. */
#define HAVE_OPENSSL_ECDH_H 1
/* Define to 1 if you have the <openssl/ec.h> header file. */
#define HAVE_OPENSSL_EC_H 1
/* Define to 1 if you have the <openssl/ecdsa.h> header file. */
#define HAVE_OPENSSL_ECDSA_H 1
/* Define to 1 if you have the <pthread.h> header file. */
#define HAVE_PTHREAD_H 1
/* Define to 1 if you have elliptic curve cryptography in openssl */
#define HAVE_OPENSSL_ECC 1
/* Define to 1 if you have elliptic curve cryptography in gcrypt */
/* #undef HAVE_GCRYPT_ECC */
/* Define to 1 if you have elliptic curve cryptography */
#define HAVE_ECC 1
/* Define to 1 if you have DSA */
/* #undef HAVE_DSA */
/* Define to 1 if you have gl_flags as a glob_t struct member */
#define HAVE_GLOB_GL_FLAGS_MEMBER 1
/* Define to 1 if you have OpenSSL with Ed25519 support */
#define HAVE_OPENSSL_ED25519 1
/* Define to 1 if you have OpenSSL with X25519 support */
#define HAVE_OPENSSL_X25519 1
/*************************** FUNCTIONS ***************************/
/* Define to 1 if you have the `EVP_aes128_ctr' function. */
#define HAVE_OPENSSL_EVP_AES_CTR 1
/* Define to 1 if you have the `EVP_aes128_cbc' function. */
#define HAVE_OPENSSL_EVP_AES_CBC 1
/* Define to 1 if you have the `EVP_aes128_gcm' function. */
/* #undef HAVE_OPENSSL_EVP_AES_GCM */
/* Define to 1 if you have the `CRYPTO_THREADID_set_callback' function. */
#define HAVE_OPENSSL_CRYPTO_THREADID_SET_CALLBACK 1
/* Define to 1 if you have the `CRYPTO_ctr128_encrypt' function. */
#define HAVE_OPENSSL_CRYPTO_CTR128_ENCRYPT 1
/* Define to 1 if you have the `EVP_CIPHER_CTX_new' function. */
#define HAVE_OPENSSL_EVP_CIPHER_CTX_NEW 1
/* Define to 1 if you have the `EVP_KDF_CTX_new_id' function. */
/* #undef HAVE_OPENSSL_EVP_KDF_CTX_NEW_ID */
/* Define to 1 if you have the `FIPS_mode' function. */
#if USE_BORINGSSL
#define HAVE_OPENSSL_FIPS_MODE 1
#endif
/* Define to 1 if you have the `EVP_DigestSign' function. */
#define HAVE_OPENSSL_EVP_DIGESTSIGN 1
/* Define to 1 if you have the `EVP_DigestVerify' function. */
#define HAVE_OPENSSL_EVP_DIGESTVERIFY 1
/* Define to 1 if you have the `OPENSSL_ia32cap_loc' function. */
/* #undef HAVE_OPENSSL_IA32CAP_LOC */
/* Define to 1 if you have the `snprintf' function. */
#define HAVE_SNPRINTF 1
/* Define to 1 if you have the `_snprintf' function. */
/* #undef HAVE__SNPRINTF */
/* Define to 1 if you have the `_snprintf_s' function. */
/* #undef HAVE__SNPRINTF_S */
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `_vsnprintf' function. */
/* #undef HAVE__VSNPRINTF */
/* Define to 1 if you have the `_vsnprintf_s' function. */
/* #undef HAVE__VSNPRINTF_S */
/* Define to 1 if you have the `isblank' function. */
#define HAVE_ISBLANK 1
/* Define to 1 if you have the `strncpy' function. */
#define HAVE_STRNCPY 1
/* Define to 1 if you have the `strndup' function. */
#define HAVE_STRNDUP 1
/* Define to 1 if you have the `cfmakeraw' function. */
/* #undef HAVE_CFMAKERAW */
/* Define to 1 if you have the `getaddrinfo' function. */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `poll' function. */
#define HAVE_POLL 1
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Define to 1 if you have the `clock_gettime' function. */
/* #undef HAVE_CLOCK_GETTIME */
/* Define to 1 if you have the `ntohll' function. */
/* #undef HAVE_NTOHLL */
/* Define to 1 if you have the `htonll' function. */
/* #undef HAVE_HTONLL */
/* Define to 1 if you have the `strtoull' function. */
#define HAVE_STRTOULL 1
/* Define to 1 if you have the `__strtoull' function. */
/* #undef HAVE___STRTOULL */
/* Define to 1 if you have the `_strtoui64' function. */
/* #undef HAVE__STRTOUI64 */
/* Define to 1 if you have the `glob' function. */
#define HAVE_GLOB 1
/* Define to 1 if you have the `explicit_bzero' function. */
#define HAVE_EXPLICIT_BZERO 1
/* Define to 1 if you have the `memset_s' function. */
#define HAVE_MEMSET_S 1
/* Define to 1 if you have the `SecureZeroMemory' function. */
/* #undef HAVE_SECURE_ZERO_MEMORY */
/* Define to 1 if you have the `cmocka_set_test_filter' function. */
/* #undef HAVE_CMOCKA_SET_TEST_FILTER */
/*************************** LIBRARIES ***************************/
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define to 1 if you have the `gcrypt' library (-lgcrypt). */
/* #undef HAVE_LIBGCRYPT */
/* Define to 1 if you have the 'mbedTLS' library (-lmbedtls). */
/* #undef HAVE_LIBMBEDCRYPTO */
/* Define to 1 if you have the `pthread' library (-lpthread). */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `cmocka' library (-lcmocka). */
/* #undef HAVE_CMOCKA */
/**************************** OPTIONS ****************************/
#define HAVE_GCC_THREAD_LOCAL_STORAGE 1
/* #undef HAVE_MSC_THREAD_LOCAL_STORAGE */
#define HAVE_FALLTHROUGH_ATTRIBUTE 1
#define HAVE_UNUSED_ATTRIBUTE 1
#define HAVE_CONSTRUCTOR_ATTRIBUTE 1
#define HAVE_DESTRUCTOR_ATTRIBUTE 1
#define HAVE_GCC_VOLATILE_MEMORY_PROTECTION 1
#define HAVE_COMPILER__FUNC__ 1
#define HAVE_COMPILER__FUNCTION__ 1
/* #undef HAVE_GCC_BOUNDED_ATTRIBUTE */
/* Define to 1 if you want to enable GSSAPI */
/* #undef WITH_GSSAPI */
/* Define to 1 if you want to enable ZLIB */
/* #undef WITH_ZLIB */
/* Define to 1 if you want to enable SFTP */
/* #undef WITH_SFTP */
/* Define to 1 if you want to enable server support */
#define WITH_SERVER 1
/* Define to 1 if you want to enable DH group exchange algorithms */
/* #undef WITH_GEX */
/* Define to 1 if you want to enable blowfish cipher support */
/* #undef WITH_BLOWFISH_CIPHER */
/* Define to 1 if you want to enable debug output for crypto functions */
/* #undef DEBUG_CRYPTO */
/* Define to 1 if you want to enable debug output for packet functions */
/* #undef DEBUG_PACKET */
/* Define to 1 if you want to enable pcap output support (experimental) */
/* #undef WITH_PCAP */
/* Define to 1 if you want to enable calltrace debug output */
/* #undef DEBUG_CALLTRACE */
/* Define to 1 if you want to enable NaCl support */
/* #undef WITH_NACL */
/*************************** ENDIAN *****************************/
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
/* #undef WORDS_BIGENDIAN */
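
The header above is libssh's generated feature-detection config; each HAVE_* macro is meant to be consumed with ordinary conditional compilation. A minimal sketch of that pattern, assuming the header is included as "config.h" (the wipe() helper and its fallback chain are illustrative, not libssh code):

#include "config.h"   // the generated header shown above (path assumed)
#include <cstddef>
#include <cstring>

// Scrub a sensitive buffer using whichever primitive the build detected.
static void wipe(void * buf, size_t len)
{
#if defined(HAVE_EXPLICIT_BZERO)
    explicit_bzero(buf, len);            // glibc/BSD: not optimized away
#elif defined(HAVE_MEMSET_S)
    memset_s(buf, len, 0, len);          // C11 Annex K variant
#else
    auto * p = static_cast<volatile unsigned char *>(buf);
    while (len--) *p++ = 0;              // portable last-resort fallback
#endif
}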

View File

@ -1,6 +1,3 @@
include(CheckCCompilerFlag)
include(CheckCXXCompilerFlag)
set(LIBUNWIND_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libunwind")
set(LIBUNWIND_CXX_SOURCES
@ -48,27 +45,11 @@ target_compile_definitions(unwind PRIVATE -D_LIBUNWIND_NO_HEAP=1 -D_DEBUG -D_LIB
# and disable sanitizers (otherwise infinite loop may happen)
target_compile_options(unwind PRIVATE -O3 -fno-exceptions -funwind-tables -fno-sanitize=all $<$<COMPILE_LANGUAGE:CXX>:-nostdinc++ -fno-rtti>)
check_c_compiler_flag(-Wunused-but-set-variable HAVE_WARNING_UNUSED_BUT_SET_VARIABLE)
if (HAVE_WARNING_UNUSED_BUT_SET_VARIABLE)
target_compile_options(unwind PRIVATE -Wno-unused-but-set-variable)
endif ()
check_cxx_compiler_flag(-Wmissing-attributes HAVE_WARNING_MISSING_ATTRIBUTES)
if (HAVE_WARNING_MISSING_ATTRIBUTES)
target_compile_options(unwind PRIVATE -Wno-missing-attributes)
endif ()
check_cxx_compiler_flag(-Wmaybe-uninitialized HAVE_WARNING_MAYBE_UNINITIALIZED)
if (HAVE_WARNING_MAYBE_UNINITIALIZED)
target_compile_options(unwind PRIVATE -Wno-maybe-uninitialized)
endif ()
target_compile_options(unwind PRIVATE -Wno-unused-but-set-variable)
# The library is using register variables that are bound to specific registers
# Example: DwarfInstructions.hpp: register unsigned long long x16 __asm("x16") = cfa;
check_cxx_compiler_flag(-Wregister HAVE_WARNING_REGISTER)
if (HAVE_WARNING_REGISTER)
target_compile_options(unwind PRIVATE "$<$<COMPILE_LANGUAGE:CXX>:-Wno-register>")
endif ()
target_compile_options(unwind PRIVATE "$<$<COMPILE_LANGUAGE:CXX>:-Wno-register>")
install(
TARGETS unwind

View File

@ -1,12 +1,17 @@
if (APPLE OR SANITIZE STREQUAL "undefined" OR SANITIZE STREQUAL "memory")
# llvm-tblgen, that is used during LLVM build, doesn't work with UBSan.
set (ENABLE_EMBEDDED_COMPILER_DEFAULT OFF)
set (ENABLE_DWARF_PARSER_DEFAULT OFF)
else()
set (ENABLE_EMBEDDED_COMPILER_DEFAULT ON)
set (ENABLE_EMBEDDED_COMPILER_DEFAULT ${ENABLE_LIBRARIES})
set (ENABLE_DWARF_PARSER_DEFAULT ${ENABLE_LIBRARIES})
endif()
option (ENABLE_EMBEDDED_COMPILER "Enable support for JIT compilation during query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT})
option (ENABLE_EMBEDDED_COMPILER "Enable support for JIT compilation during query execution (uses LLVM library)" ${ENABLE_EMBEDDED_COMPILER_DEFAULT})
if (NOT ENABLE_EMBEDDED_COMPILER)
option (ENABLE_DWARF_PARSER "Enable support for DWARF input format (uses LLVM library)" ${ENABLE_DWARF_PARSER_DEFAULT})
if (NOT ENABLE_EMBEDDED_COMPILER AND NOT ENABLE_DWARF_PARSER)
message(STATUS "Not using LLVM")
return()
endif()

2
contrib/orc vendored

@ -1 +1 @@
Subproject commit a20d1d9d7ad4a4be7b7ba97588e16ca8b9abb2b6
Subproject commit e24f2c2a3ca0769c96704ab20ad6f512a83ea2ad

View File

@ -54,8 +54,10 @@ namespace pdqsort_detail {
block_size = 64,
// Cacheline size, assumes power of two.
cacheline_size = 64
cacheline_size = 64,
/// Try sort allowed iterations
try_sort_iterations = 3,
};
#if __cplusplus >= 201103L
@ -501,6 +503,167 @@ namespace pdqsort_detail {
leftmost = false;
}
}
template<class Iter, class Compare, bool Branchless>
inline bool pdqsort_try_sort_loop(Iter begin,
Iter end,
Compare comp,
size_t bad_allowed,
size_t iterations_allowed,
bool force_sort = false,
bool leftmost = true) {
typedef typename std::iterator_traits<Iter>::difference_type diff_t;
// Use a while loop for tail recursion elimination.
while (true) {
if (!force_sort && iterations_allowed == 0) {
return false;
}
diff_t size = end - begin;
// Insertion sort is faster for small arrays.
if (size < insertion_sort_threshold) {
if (leftmost) insertion_sort(begin, end, comp);
else unguarded_insertion_sort(begin, end, comp);
return true;
}
// Choose pivot as median of 3 or pseudomedian of 9.
diff_t s2 = size / 2;
if (size > ninther_threshold) {
sort3(begin, begin + s2, end - 1, comp);
sort3(begin + 1, begin + (s2 - 1), end - 2, comp);
sort3(begin + 2, begin + (s2 + 1), end - 3, comp);
sort3(begin + (s2 - 1), begin + s2, begin + (s2 + 1), comp);
std::iter_swap(begin, begin + s2);
} else sort3(begin + s2, begin, end - 1, comp);
// If *(begin - 1) is the end of the right partition of a previous partition operation
// there is no element in [begin, end) that is smaller than *(begin - 1). Then if our
// pivot compares equal to *(begin - 1) we change strategy, putting equal elements in
// the left partition, greater elements in the right partition. We do not have to
// recurse on the left partition, since it's sorted (all equal).
if (!leftmost && !comp(*(begin - 1), *begin)) {
begin = partition_left(begin, end, comp) + 1;
continue;
}
// Partition and get results.
std::pair<Iter, bool> part_result =
Branchless ? partition_right_branchless(begin, end, comp)
: partition_right(begin, end, comp);
Iter pivot_pos = part_result.first;
bool already_partitioned = part_result.second;
// Check for a highly unbalanced partition.
diff_t l_size = pivot_pos - begin;
diff_t r_size = end - (pivot_pos + 1);
bool highly_unbalanced = l_size < size / 8 || r_size < size / 8;
// If we got a highly unbalanced partition we shuffle elements to break many patterns.
if (highly_unbalanced) {
if (!force_sort) {
return false;
}
// If we had too many bad partitions, switch to heapsort to guarantee O(n log n).
if (--bad_allowed == 0) {
std::make_heap(begin, end, comp);
std::sort_heap(begin, end, comp);
return true;
}
if (l_size >= insertion_sort_threshold) {
std::iter_swap(begin, begin + l_size / 4);
std::iter_swap(pivot_pos - 1, pivot_pos - l_size / 4);
if (l_size > ninther_threshold) {
std::iter_swap(begin + 1, begin + (l_size / 4 + 1));
std::iter_swap(begin + 2, begin + (l_size / 4 + 2));
std::iter_swap(pivot_pos - 2, pivot_pos - (l_size / 4 + 1));
std::iter_swap(pivot_pos - 3, pivot_pos - (l_size / 4 + 2));
}
}
if (r_size >= insertion_sort_threshold) {
std::iter_swap(pivot_pos + 1, pivot_pos + (1 + r_size / 4));
std::iter_swap(end - 1, end - r_size / 4);
if (r_size > ninther_threshold) {
std::iter_swap(pivot_pos + 2, pivot_pos + (2 + r_size / 4));
std::iter_swap(pivot_pos + 3, pivot_pos + (3 + r_size / 4));
std::iter_swap(end - 2, end - (1 + r_size / 4));
std::iter_swap(end - 3, end - (2 + r_size / 4));
}
}
} else {
// If we were decently balanced and we tried to sort an already partitioned
// sequence try to use insertion sort.
if (already_partitioned && partial_insertion_sort(begin, pivot_pos, comp)
&& partial_insertion_sort(pivot_pos + 1, end, comp)) {
return true;
}
}
// Sort the left partition first using recursion and do tail recursion elimination for
// the right-hand partition.
if (pdqsort_try_sort_loop<Iter, Compare, Branchless>(begin,
pivot_pos,
comp,
bad_allowed,
iterations_allowed - 1,
force_sort,
leftmost)) {
force_sort = true;
} else {
return false;
}
--iterations_allowed;
begin = pivot_pos + 1;
leftmost = false;
}
return false;
}
template<class Iter, class Compare, bool Branchless>
inline bool pdqsort_try_sort_impl(Iter begin, Iter end, Compare comp, size_t bad_allowed)
{
typedef typename std::iterator_traits<Iter>::difference_type diff_t;
static constexpr size_t iterations_allowed = pdqsort_detail::try_sort_iterations;
static constexpr size_t num_to_try = 16;
diff_t size = end - begin;
if (size > num_to_try * 10)
{
size_t out_of_order_elements = 0;
for (size_t i = 1; i < num_to_try; ++i)
{
diff_t offset = size / num_to_try;
diff_t prev_position = offset * (i - 1);
diff_t curr_position = offset * i;
diff_t next_position = offset * (i + 1) - 1;
bool prev_less_than_curr = comp(*(begin + prev_position), *(begin + curr_position));
bool curr_less_than_next = comp(*(begin + curr_position), *(begin + next_position));
if ((prev_less_than_curr && curr_less_than_next) || (!prev_less_than_curr && !curr_less_than_next))
continue;
++out_of_order_elements;
if (out_of_order_elements > iterations_allowed)
return false;
}
}
return pdqsort_try_sort_loop<Iter, Compare, Branchless>(begin, end, comp, bad_allowed, iterations_allowed);
}
}
@ -538,6 +701,41 @@ inline void pdqsort_branchless(Iter begin, Iter end) {
pdqsort_branchless(begin, end, std::less<T>());
}
template<class Iter, class Compare>
inline bool pdqsort_try_sort(Iter begin, Iter end, Compare comp) {
if (begin == end) return true;
#if __cplusplus >= 201103L
return pdqsort_detail::pdqsort_try_sort_impl<Iter, Compare,
pdqsort_detail::is_default_compare<typename std::decay<Compare>::type>::value &&
std::is_arithmetic<typename std::iterator_traits<Iter>::value_type>::value>(
begin, end, comp, pdqsort_detail::log2(end - begin));
#else
return pdqsort_detail::pdqsort_try_sort_impl<Iter, Compare, false>(
begin, end, comp, pdqsort_detail::log2(end - begin));
#endif
}
template<class Iter>
inline bool pdqsort_try_sort(Iter begin, Iter end) {
typedef typename std::iterator_traits<Iter>::value_type T;
return pdqsort_try_sort(begin, end, std::less<T>());
}
template<class Iter, class Compare>
inline bool pdqsort_try_sort_branchless(Iter begin, Iter end, Compare comp) {
if (begin == end) return true;
return pdqsort_detail::pdqsort_try_sort_impl<Iter, Compare, true>(
begin, end, comp, pdqsort_detail::log2(end - begin));
}
template<class Iter>
inline bool pdqsort_try_sort_branchless(Iter begin, Iter end) {
typedef typename std::iterator_traits<Iter>::value_type T;
return pdqsort_try_sort_branchless(begin, end, std::less<T>());
}
#undef PDQSORT_PREFER_MOVE
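
The new pdqsort_try_sort* entry points return false instead of finishing when the input looks unfavourable (the sampling pass finds too many out-of-order probes, or a partition turns out highly unbalanced outside the iteration budget), so a caller is expected to fall back to a full sort. A minimal usage sketch under that assumption; the fallback policy here is illustrative, not taken from ClickHouse:

#include <vector>
// #include "pdqsort.h"   // the header patched above

// Try the cheap opportunistic sort first; it bails out early on inputs
// that look mostly unsorted, returning false without completing the sort.
template <typename T>
void sort_with_fallback(std::vector<T> & data)
{
    if (!pdqsort_try_sort(data.begin(), data.end()))
        pdqsort(data.begin(), data.end());   // unconditional O(n log n) fallback
}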

View File

@ -76,7 +76,6 @@ else()
endif()
endif()
include(CheckCCompilerFlag)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64")
if(POWER9)
set(HAS_POWER9 1)
@ -88,21 +87,12 @@ if(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64")
endif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|AARCH64|arm64|ARM64")
CHECK_C_COMPILER_FLAG("-march=armv8-a+crc+crypto" HAS_ARMV8_CRC)
if(HAS_ARMV8_CRC)
message(STATUS " HAS_ARMV8_CRC yes")
set(HAS_ARMV8_CRC 1)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function")
endif(HAS_ARMV8_CRC)
endif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|AARCH64|arm64|ARM64")
include(CheckCXXSourceCompiles)
if(NOT MSVC)
set(CMAKE_REQUIRED_FLAGS "-msse4.2 -mpclmul")
endif()
unset(CMAKE_REQUIRED_FLAGS)
if(HAVE_SSE42)
add_definitions(-DHAVE_SSE42)
add_definitions(-DHAVE_PCLMUL)
@ -121,75 +111,18 @@ elseif(CMAKE_SYSTEM_NAME MATCHES "Linux")
add_definitions(-DOS_LINUX)
elseif(CMAKE_SYSTEM_NAME MATCHES "SunOS")
add_definitions(-DOS_SOLARIS)
elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD")
add_definitions(-DOS_GNU_KFREEBSD)
elseif(CMAKE_SYSTEM_NAME MATCHES "FreeBSD")
add_definitions(-DOS_FREEBSD)
elseif(CMAKE_SYSTEM_NAME MATCHES "NetBSD")
add_definitions(-DOS_NETBSD)
elseif(CMAKE_SYSTEM_NAME MATCHES "OpenBSD")
add_definitions(-DOS_OPENBSD)
elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly")
add_definitions(-DOS_DRAGONFLYBSD)
elseif(CMAKE_SYSTEM_NAME MATCHES "Android")
add_definitions(-DOS_ANDROID)
elseif(CMAKE_SYSTEM_NAME MATCHES "Windows")
add_definitions(-DWIN32 -DOS_WIN -D_MBCS -DWIN64 -DNOMINMAX)
if(MINGW)
add_definitions(-D_WIN32_WINNT=_WIN32_WINNT_VISTA)
endif()
endif()
if(NOT WIN32)
add_definitions(-DROCKSDB_PLATFORM_POSIX -DROCKSDB_LIB_IO_POSIX)
endif()
add_definitions(-DROCKSDB_PLATFORM_POSIX -DROCKSDB_LIB_IO_POSIX)
option(WITH_FALLOCATE "build with fallocate" ON)
if(WITH_FALLOCATE)
CHECK_C_SOURCE_COMPILES("
#include <fcntl.h>
#include <linux/falloc.h>
int main() {
int fd = open(\"/dev/null\", 0);
fallocate(fd, FALLOC_FL_KEEP_SIZE, 0, 1024);
}
" HAVE_FALLOCATE)
if(HAVE_FALLOCATE)
add_definitions(-DROCKSDB_FALLOCATE_PRESENT)
endif()
endif()
CHECK_C_SOURCE_COMPILES("
#include <fcntl.h>
int main() {
int fd = open(\"/dev/null\", 0);
sync_file_range(fd, 0, 1024, SYNC_FILE_RANGE_WRITE);
}
" HAVE_SYNC_FILE_RANGE_WRITE)
if(HAVE_SYNC_FILE_RANGE_WRITE)
add_definitions(-DROCKSDB_RANGESYNC_PRESENT)
endif()
CHECK_C_SOURCE_COMPILES("
#include <pthread.h>
int main() {
(void) PTHREAD_MUTEX_ADAPTIVE_NP;
}
" HAVE_PTHREAD_MUTEX_ADAPTIVE_NP)
if(HAVE_PTHREAD_MUTEX_ADAPTIVE_NP)
if (OS_LINUX OR OS_FREEBSD)
add_definitions(-DROCKSDB_PTHREAD_ADAPTIVE_MUTEX)
endif()
include(CheckCXXSymbolExists)
if (OS_FREEBSD)
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc_np.h" HAVE_MALLOC_USABLE_SIZE)
else()
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc.h" HAVE_MALLOC_USABLE_SIZE)
endif()
if(HAVE_MALLOC_USABLE_SIZE)
add_definitions(-DROCKSDB_MALLOC_USABLE_SIZE)
endif()
if (OS_LINUX)
add_definitions(-DROCKSDB_SCHED_GETCPU_PRESENT)
add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT)
@ -204,7 +137,6 @@ include_directories("${ROCKSDB_SOURCE_DIR}/include")
if(WITH_FOLLY_DISTRIBUTED_MUTEX)
include_directories("${ROCKSDB_SOURCE_DIR}/third-party/folly")
endif()
find_package(Threads REQUIRED)
# Main library source code
@ -497,7 +429,7 @@ set(SOURCES
${ROCKSDB_SOURCE_DIR}/utilities/transactions/lock/range/range_tree/lib/util/memarena.cc
rocksdb_build_version.cc)
if(HAVE_SSE42 AND NOT MSVC)
if(HAVE_SSE42)
set_source_files_properties(
"${ROCKSDB_SOURCE_DIR}/util/crc32c.cc"
PROPERTIES COMPILE_FLAGS "-msse4.2 -mpclmul")

View File

@ -24,6 +24,12 @@ else ()
set (SNAPPY_HAVE_SSSE3 0)
endif ()
if (ARCH_AMD64 AND ENABLE_SSE42)
set (SNAPPY_HAVE_X86_CRC32 1)
else ()
set (SNAPPY_HAVE_X86_CRC32 0)
endif ()
configure_file(
"${SOURCE_DIR}/cmake/config.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/config.h")

View File

@ -6,12 +6,13 @@ FILES_TO_CHECKOUT=$(git rev-parse --git-dir)/info/sparse-checkout
echo '/*' > $FILES_TO_CHECKOUT
echo '!/test/*' >> $FILES_TO_CHECKOUT
echo '/test/build/*' >> $FILES_TO_CHECKOUT
echo '/test/core/tsi/alts/fake_handshaker/*' >> $FILES_TO_CHECKOUT
echo '/test/core/event_engine/fuzzing_event_engine/*' >> $FILES_TO_CHECKOUT
echo '!/tools/*' >> $FILES_TO_CHECKOUT
echo '/tools/codegen/*' >> $FILES_TO_CHECKOUT
echo '!/examples/*' >> $FILES_TO_CHECKOUT
echo '!/doc/*' >> $FILES_TO_CHECKOUT
# FIXME why do we need csharp?
#echo '!/src/csharp/*' >> $FILES_TO_CHECKOUT
echo '!/src/csharp/*' >> $FILES_TO_CHECKOUT
echo '!/src/python/*' >> $FILES_TO_CHECKOUT
echo '!/src/objective-c/*' >> $FILES_TO_CHECKOUT
echo '!/src/php/*' >> $FILES_TO_CHECKOUT

View File

@ -1,11 +1,12 @@
#!/bin/sh
set -e
WORKDIR=$(dirname "$0")
WORKDIR=$(readlink -f "${WORKDIR}")
SCRIPT_PATH=$(realpath "$0")
SCRIPT_DIR=$(dirname "${SCRIPT_PATH}")
GIT_DIR=$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel)
cd $GIT_DIR
"$WORKDIR/sparse-checkout/setup-sparse-checkout.sh"
contrib/sparse-checkout/setup-sparse-checkout.sh
git submodule init
git submodule sync
git submodule update --depth=1
git config --file .gitmodules --get-regexp .*path | sed 's/[^ ]* //' | xargs -I _ --max-procs 64 git submodule update --depth=1 --single-branch _

View File

@ -98,8 +98,6 @@ if (ARCH_S390X)
add_compile_definitions(WORDS_BIGENDIAN)
endif ()
find_package(Threads REQUIRED)
add_library(_liblzma
${SRC_DIR}/src/common/mythread.h

View File

@ -4,8 +4,8 @@ FROM node:16-alpine
RUN apk add --no-cache git openssh bash
# At this point we want to really update /opt/clickhouse-docs
# despite the cached images
# At this point we want to really update /opt/clickhouse-docs directory
# So we reset the cache
ARG CACHE_INVALIDATOR=0
RUN git clone https://github.com/ClickHouse/clickhouse-docs.git \

View File

@ -2,8 +2,8 @@
# If the image is built from Dockerfile.alpine, then the `-alpine` suffix is added automatically,
# so the only purpose of Dockerfile.ubuntu is to push `latest`, `head` and so on w/o suffixes
FROM ubuntu:20.04 AS glibc-donor
ARG TARGETARCH
RUN arch=${TARGETARCH:-amd64} \
&& case $arch in \
amd64) rarch=x86_64 ;; \
@ -31,8 +31,10 @@ RUN arch=${TARGETARCH:-amd64} \
arm64) ln -sf /lib/ld-2.31.so /lib/ld-linux-aarch64.so.1 ;; \
esac
ARG REPOSITORY="https://s3.amazonaws.com/clickhouse-builds/22.4/31c367d3cd3aefd316778601ff6565119fe36682/package_release"
ARG VERSION="23.8.2.7"
# lts / testing / prestable / etc
ARG REPO_CHANNEL="stable"
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
ARG VERSION="23.10.4.25"
ARG PACKAGES="clickhouse-keeper"
# user/group precreated explicitly with fixed uid/gid on purpose.
@ -46,16 +48,14 @@ ARG PACKAGES="clickhouse-keeper"
ARG TARGETARCH
RUN arch=${TARGETARCH:-amd64} \
&& for package in ${PACKAGES}; do \
{ \
{ echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" -O "/tmp/${package}-${VERSION}-${arch}.tgz" \
&& tar xvzf "/tmp/${package}-${VERSION}-${arch}.tgz" --strip-components=1 -C / ; \
} || \
{ echo "Fallback to ${REPOSITORY}/${package}-${VERSION}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}.tgz" -O "/tmp/${package}-${VERSION}.tgz" \
&& tar xvzf "/tmp/${package}-${VERSION}.tgz" --strip-components=2 -C / ; \
} ; \
} || exit 1 \
( \
cd /tmp \
&& echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz.sha512" \
&& sed 's:/output/:/tmp/:' < "${package}-${VERSION}-${arch}.tgz.sha512" | sha512sum -c \
&& tar xvzf "${package}-${VERSION}-${arch}.tgz" --strip-components=1 -C / \
) \
; done \
&& rm /tmp/*.tgz /install -r \
&& addgroup -S -g 101 clickhouse \

View File

@ -15,6 +15,11 @@ if [ "$EXTRACT_TOOLCHAIN_DARWIN" = "1" ]; then
mkdir -p /build/cmake/toolchain/darwin-x86_64
tar xJf /MacOSX11.0.sdk.tar.xz -C /build/cmake/toolchain/darwin-x86_64 --strip-components=1
ln -sf darwin-x86_64 /build/cmake/toolchain/darwin-aarch64
if [ "$EXPORT_SOURCES_WITH_SUBMODULES" = "1" ]; then
cd /build
tar --exclude-vcs-ignores --exclude-vcs --exclude build --exclude build_docker --exclude debian --exclude .git --exclude .github --exclude .cache --exclude docs --exclude tests/integration -c . | pigz -9 > /output/source_sub.tar.gz
fi
fi
# Uncomment to debug ccache. Don't put ccache log in /output right away, or it
@ -26,9 +31,6 @@ fi
mkdir -p /build/build_docker
cd /build/build_docker
rm -f CMakeCache.txt
# Read cmake arguments into array (possibly empty)
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
env
if [ -n "$MAKE_DEB" ]; then
rm -rf /build/packages/root
@ -55,11 +57,36 @@ ccache_status
# clear cache stats
ccache --zero-stats ||:
function check_prebuild_exists() {
local path="$1"
[ -d "$path" ] && [ "$(ls -A "$path")" ]
}
# Check whether the directory with pre-build scripts exists and not empty.
if check_prebuild_exists /build/packages/pre-build
then
# Execute all commands
for file in /build/packages/pre-build/*.sh ;
do
# The script may want to modify environment variables. Why not to allow it to do so?
# shellcheck disable=SC1090
source "$file"
done
else
echo "There are no subcommands to execute :)"
fi
# Read cmake arguments into array (possibly empty)
# The name of local variable has to be different from the name of environment variable
# not to override it. And make it usable for other processes.
read -ra CMAKE_FLAGS_ARRAY <<< "${CMAKE_FLAGS:-}"
env
if [ "$BUILD_MUSL_KEEPER" == "1" ]
then
# build keeper with musl separately
# and without rust bindings
cmake --debug-trycompile -DENABLE_RUST=OFF -DBUILD_STANDALONE_KEEPER=1 -DENABLE_CLICKHOUSE_KEEPER=1 -DCMAKE_VERBOSE_MAKEFILE=1 -DUSE_MUSL=1 -LA -DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-x86_64-musl.cmake "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
cmake --debug-trycompile -DENABLE_RUST=OFF -DBUILD_STANDALONE_KEEPER=1 -DENABLE_CLICKHOUSE_KEEPER=1 -DCMAKE_VERBOSE_MAKEFILE=1 -DUSE_MUSL=1 -LA -DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-x86_64-musl.cmake "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS_ARRAY[@]}" ..
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
ninja $NINJA_FLAGS clickhouse-keeper
@ -73,13 +100,13 @@ then
fi
rm -f CMakeCache.txt
# Build the rest of binaries
cmake --debug-trycompile -DBUILD_STANDALONE_KEEPER=0 -DCREATE_KEEPER_SYMLINK=0 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
else
# Build everything
cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
# Modify CMake flags, so we won't overwrite standalone keeper with symlinks
CMAKE_FLAGS_ARRAY+=(-DBUILD_STANDALONE_KEEPER=0 -DCREATE_KEEPER_SYMLINK=0)
fi
# Build everything
cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS_ARRAY[@]}" ..
# No quotes because I want it to expand to nothing if empty.
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
ninja $NINJA_FLAGS $BUILD_TARGET
@ -99,6 +126,7 @@ fi
mv ./programs/clickhouse* /output || mv ./programs/*_fuzzer /output
[ -x ./programs/self-extracting/clickhouse ] && mv ./programs/self-extracting/clickhouse /output
[ -x ./programs/self-extracting/clickhouse-stripped ] && mv ./programs/self-extracting/clickhouse-stripped /output
mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
mv ./programs/*.dict ./programs/*.options ./programs/*_seed_corpus.zip /output ||: # libFuzzer oss-fuzz compatible infrastructure
@ -145,10 +173,16 @@ then
# This is why we add this repository snapshot from CI to the performance test
# package.
mkdir "$PERF_OUTPUT"/ch
git -C "$PERF_OUTPUT"/ch init --bare
git -C "$PERF_OUTPUT"/ch remote add origin /build
git -C "$PERF_OUTPUT"/ch fetch --no-tags --depth 50 origin HEAD:pr
git -C "$PERF_OUTPUT"/ch fetch --no-tags --depth 50 origin master:master
# Copy .git only, but skip modules, using tar
tar c -C /build/ --exclude='.git/modules/**' .git | tar x -C "$PERF_OUTPUT"/ch
# Create branch pr and origin/master to have them for the following performance comparison
git -C "$PERF_OUTPUT"/ch branch pr
git -C "$PERF_OUTPUT"/ch fetch --no-tags --no-recurse-submodules --depth 50 origin master:origin/master
# Clean remote, to not have it stale
git -C "$PERF_OUTPUT"/ch remote | xargs -n1 git -C "$PERF_OUTPUT"/ch remote remove
# And clean all tags
echo "Deleting $(git -C "$PERF_OUTPUT"/ch tag | wc -l) tags"
git -C "$PERF_OUTPUT"/ch tag | xargs git -C "$PERF_OUTPUT"/ch tag -d >/dev/null
git -C "$PERF_OUTPUT"/ch reset --soft pr
git -C "$PERF_OUTPUT"/ch log -5
(

View File

@ -105,7 +105,7 @@ def run_docker_image_with_env(
ccache_mount = ""
cmd = (
f"docker run --network=host --user={user} --rm {ccache_mount}"
f"docker run --network=host --user={user} --rm {ccache_mount} "
f"--volume={output_dir}:/output --volume={ch_root}:/build {env_part} "
f"--volume={cargo_cache_dir}:/rust/cargo/registry {interactive} {image_name}"
)
@ -179,6 +179,7 @@ def parse_env_variables(
"-DCMAKE_TOOLCHAIN_FILE=/build/cmake/darwin/toolchain-x86_64.cmake"
)
result.append("EXTRACT_TOOLCHAIN_DARWIN=1")
result.append("EXPORT_SOURCES_WITH_SUBMODULES=1")
elif is_cross_darwin_arm:
cc = compiler[: -len(DARWIN_ARM_SUFFIX)]
cmake_flags.append("-DCMAKE_AR:FILEPATH=/cctools/bin/aarch64-apple-darwin-ar")

View File

@ -23,7 +23,6 @@ COPY docker_related_config.xml /etc/clickhouse-server/config.d/
COPY entrypoint.sh /entrypoint.sh
ARG TARGETARCH
RUN arch=${TARGETARCH:-amd64} \
&& case $arch in \
amd64) mkdir -p /lib64 && ln -sf /lib/ld-2.31.so /lib64/ld-linux-x86-64.so.2 ;; \
@ -33,7 +32,7 @@ RUN arch=${TARGETARCH:-amd64} \
# lts / testing / prestable / etc
ARG REPO_CHANNEL="stable"
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
ARG VERSION="23.8.2.7"
ARG VERSION="23.10.4.25"
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
# user/group precreated explicitly with fixed uid/gid on purpose.
@ -45,16 +44,14 @@ ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
RUN arch=${TARGETARCH:-amd64} \
&& for package in ${PACKAGES}; do \
{ \
{ echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" -O "/tmp/${package}-${VERSION}-${arch}.tgz" \
&& tar xvzf "/tmp/${package}-${VERSION}-${arch}.tgz" --strip-components=1 -C / ; \
} || \
{ echo "Fallback to ${REPOSITORY}/${package}-${VERSION}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}.tgz" -O "/tmp/${package}-${VERSION}.tgz" \
&& tar xvzf "/tmp/${package}-${VERSION}.tgz" --strip-components=2 -C / ; \
} ; \
} || exit 1 \
( \
cd /tmp \
&& echo "Get ${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz" \
&& wget -c -q "${REPOSITORY}/${package}-${VERSION}-${arch}.tgz.sha512" \
&& sed 's:/output/:/tmp/:' < "${package}-${VERSION}-${arch}.tgz.sha512" | sha512sum -c \
&& tar xvzf "${package}-${VERSION}-${arch}.tgz" --strip-components=1 -C / \
) \
; done \
&& rm /tmp/*.tgz /install -r \
&& addgroup -S -g 101 clickhouse \

Some files were not shown because too many files have changed in this diff.