diff --git a/.github/actions/clean/action.yml b/.github/actions/clean/action.yml new file mode 100644 index 00000000000..547738b17cc --- /dev/null +++ b/.github/actions/clean/action.yml @@ -0,0 +1,11 @@ +name: Clean runner +description: Clean the runner's temp path on ending +runs: + using: "composite" + steps: + - name: Clean + shell: bash + run: | + docker ps --quiet | xargs --no-run-if-empty docker kill ||: + docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: + sudo rm -fr "${{runner.temp}}" diff --git a/.github/actions/common_setup/action.yml b/.github/actions/common_setup/action.yml new file mode 100644 index 00000000000..b02413adc44 --- /dev/null +++ b/.github/actions/common_setup/action.yml @@ -0,0 +1,35 @@ +name: Common setup +description: Setup necessary environments +inputs: + job_type: + description: the name to use in the TEMP_PATH and REPO_COPY + default: common + type: string + nested_job: + description: the fuse for unintended use inside of the reusable callable jobs + default: true + type: boolean +runs: + using: "composite" + steps: + - name: Setup and check ENV + shell: bash + run: | + echo "Setup the common ENV variables" + cat >> "$GITHUB_ENV" << 'EOF' + TEMP_PATH=${{runner.temp}}/${{inputs.job_type}} + REPO_COPY=${{runner.temp}}/${{inputs.job_type}}/git-repo-copy + IMAGES_PATH=${{runner.temp}}/images_path + REPORTS_PATH=${{runner.temp}}/reports_dir + EOF + if [ -z "${{env.GITHUB_JOB_OVERRIDDEN}}" ] && [ "true" == "${{inputs.nested_job}}" ]; then + echo "The GITHUB_JOB_OVERRIDDEN ENV is unset, and must be set for the nested jobs" + exit 1 + fi + - name: Setup $TEMP_PATH + shell: bash + run: | + # to remove every leftovers + sudo rm -fr "$TEMP_PATH" + mkdir -p "$REPO_COPY" + cp -a "$GITHUB_WORKSPACE"/. "$REPO_COPY"/ diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index eb4c29130c4..f6af4778cf1 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -1,3 +1,4 @@ +# yamllint disable rule:comments-indentation name: BackportPR env: @@ -33,7 +34,12 @@ jobs: - name: Python unit tests run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 -m unittest discover -s . -p '*_test.py' + echo "Testing the main ci directory" + python3 -m unittest discover -s . 
-p 'test_*.py' + for dir in *_lambda/; do + echo "Testing $dir" + python3 -m unittest discover -s "$dir" -p 'test_*.py' + done DockerHubPushAarch64: runs-on: [self-hosted, style-checker-aarch64] needs: CheckLabels @@ -69,7 +75,7 @@ jobs: name: changed_images_amd64 path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json DockerHubPush: - needs: [DockerHubPushAmd64, DockerHubPushAarch64] + needs: [DockerHubPushAmd64, DockerHubPushAarch64, PythonUnitTests] runs-on: [self-hosted, style-checker] steps: - name: Check out repository code @@ -99,385 +105,64 @@ jobs: path: ${{ runner.temp }}/changed_images.json CompatibilityCheckX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 
'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # For a proper version and performance artifacts - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_release + checkout_depth: 0 BuilderDebAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # For a proper version and performance artifacts - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_aarch64 + checkout_depth: 0 BuilderDebAsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_asan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: 
actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_asan BuilderDebTsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_tsan BuilderDebDebug: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_debug BuilderBinDarwin: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin + checkout_depth: 0 BuilderBinDarwinAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin_aarch64 + checkout_depth: 0 ############################################################################################ ##################################### Docker images ####################################### ############################################################################################ @@ -510,303 +195,114 @@ jobs: ##################################### BUILD REPORTER ####################################### ############################################################################################ BuilderReport: + if: ${{ success() || failure() }} needs: - BuilderDebRelease - BuilderDebAarch64 - BuilderDebAsan - BuilderDebTsan - BuilderDebDebug - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + 
test_name: ClickHouse special build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (arm64) + runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty 
docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. 
- runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" FinishCheck: needs: - DockerHubPush diff --git a/.github/workflows/docs_check.yml b/.github/workflows/docs_check.yml index dada9999a68..6d449e74f30 100644 --- a/.github/workflows/docs_check.yml +++ b/.github/workflows/docs_check.yml @@ -96,68 +96,30 @@ jobs: path: ${{ runner.temp }}/changed_images.json StyleCheck: needs: DockerHubPush - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/docs_check - REPO_COPY=${{runner.temp}}/docs_check/ClickHouse - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Docs Check - run: | - cp -r "$GITHUB_WORKSPACE" 
"$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 docs_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Docs check + runner_type: func-tester-aarch64 + additional_envs: | + run_command: | + cd "$REPO_COPY/tests/ci" + python3 docs_check.py FinishCheck: needs: - StyleCheck diff --git a/.github/workflows/jepsen.yml b/.github/workflows/jepsen.yml index 7f1fd16aa89..163de7769af 100644 --- a/.github/workflows/jepsen.yml +++ b/.github/workflows/jepsen.yml @@ -11,60 +11,19 @@ on: # yamllint disable-line rule:truthy workflow_call: jobs: KeeperJepsenRelease: - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/keeper_jepsen - REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 - filter: tree:0 - - name: Jepsen Test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 jepsen_check.py keeper - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Jepsen keeper check + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 jepsen_check.py keeper # ServerJepsenRelease: # runs-on: [self-hosted, style-checker] - # if: ${{ always() }} - # needs: [KeeperJepsenRelease] - # steps: - # - name: Set envs - # run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/server_jepsen - # REPO_COPY=${{runner.temp}}/server_jepsen/ClickHouse - # EOF - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # clear-repository: true - # fetch-depth: 0 - # filter: tree:0 - # - name: Jepsen Test - # run: | - # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" - # python3 jepsen_check.py server - # - name: Cleanup - # if: always() - # run: | - # docker ps --quiet | xargs --no-run-if-empty docker kill ||: - # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - # sudo rm -fr "$TEMP_PATH" + # uses: ./.github/workflows/reusable_test.yml + # with: + # test_name: Jepsen server check + # runner_type: style-checker + # run_command: | + # cd "$REPO_COPY/tests/ci" + # python3 jepsen_check.py server diff --git a/.github/workflows/libfuzzer.yml b/.github/workflows/libfuzzer.yml index e8a0396684a..1ca637c0d84 100644 --- a/.github/workflows/libfuzzer.yml +++ b/.github/workflows/libfuzzer.yml @@ -10,86 +10,17 @@ on: # yamllint disable-line rule:truthy workflow_call: jobs: BuilderFuzzers: - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=fuzzers - EOF - - name: Download changed images - # even if artifact does not exist, e.g. 
on `do not test` label or failed Docker job - continue-on-error: true - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - ref: ${{github.ref}} - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: fuzzers libFuzzerTest: needs: [BuilderFuzzers] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/libfuzzer - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=libFuzzer tests - REPO_COPY=${{runner.temp}}/libfuzzer/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download changed images - # even if artifact does not exist, e.g. on `do not test` label or failed Docker job - continue-on-error: true - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: libFuzzer test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: libFuzzer tests + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index a9aa7717add..e662a5b6f98 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -1,3 +1,4 @@ +# yamllint disable rule:comments-indentation name: MasterCI env: @@ -19,7 +20,12 @@ jobs: - name: Python unit tests run: | cd "$GITHUB_WORKSPACE/tests/ci" - python3 -m unittest discover -s . -p '*_test.py' + echo "Testing the main ci directory" + python3 -m unittest discover -s . -p 'test_*.py' + for dir in *_lambda/; do + echo "Testing $dir" + python3 -m unittest discover -s "$dir" -p 'test_*.py' + done DockerHubPushAarch64: runs-on: [self-hosted, style-checker-aarch64] steps: @@ -83,885 +89,140 @@ jobs: path: ${{ runner.temp }}/changed_images.json StyleCheck: needs: DockerHubPush - runs-on: [self-hosted, style-checker] if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - EOF - - name: Download changed images - # even if artifact does not exist, e.g. 
on `do not test` label or failed Docker job - continue-on-error: true - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Style Check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Style check + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" python3 style_check.py --no-push - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" CompatibilityCheckX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - 
runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # For a proper version and performance artifacts - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + checkout_depth: 0 + build_name: package_release BuilderDebAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/images_path - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # For a proper version and performance artifacts - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + checkout_depth: 0 + build_name: package_aarch64 BuilderBinRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd 
"$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + checkout_depth: 0 + build_name: binary_release BuilderDebAsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_asan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_asan BuilderDebUBsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_ubsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_ubsan BuilderDebTsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan - EOF - - name: Download changed 
images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_tsan BuilderDebMsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_msan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_msan BuilderDebDebug: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_debug ########################################################################################## ##################################### SPECIAL BUILDS ##################################### ########################################################################################## BuilderBinClangTidy: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_tidy - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_tidy BuilderBinDarwin: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin + checkout_depth: 0 BuilderBinAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_aarch64 + checkout_depth: 0 BuilderBinFreeBSD: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_freebsd - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_freebsd + checkout_depth: 0 BuilderBinDarwinAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin_aarch64 + checkout_depth: 0 BuilderBinPPC64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_ppc64le - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_ppc64le + checkout_depth: 0 BuilderBinAmd64Compat: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - 
IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_amd64_compat - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_amd64_compat + checkout_depth: 0 BuilderBinAarch64V80Compat: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_aarch64_v80compat - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_aarch64_v80compat + checkout_depth: 0 BuilderBinRISCV64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_riscv64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload 
build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_riscv64 + checkout_depth: 0 BuilderBinS390X: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_s390x - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_s390x + checkout_depth: 0 ############################################################################################ ##################################### Docker images ####################################### ############################################################################################ @@ -994,6 +255,7 @@ jobs: ##################################### BUILD REPORTER ####################################### ############################################################################################ BuilderReport: + if: ${{ success() || failure() }} needs: - BuilderBinRelease - BuilderDebAarch64 @@ -1003,42 +265,19 @@ jobs: - BuilderDebRelease - BuilderDebTsan - BuilderDebUBsan - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse special build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (arm64) + runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS 
####################################### ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseDatabaseOrdinary: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release_database_ordinary - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseOrdinary) - REPO_COPY=${{runner.temp}}/stateless_release_database_ordinary/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, DatabaseOrdinary) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseDatabaseReplicated: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional 
test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, DatabaseReplicated) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseS3: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ 
env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseAnalyzer: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_analyzer - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, analyzer) - REPO_COPY=${{runner.temp}}/stateless_analyzer/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, analyzer) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: 
Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check 
out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all 
--quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan1: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: 
+ uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan3: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat 
>> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan4: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan5: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - 
name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py 
"$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: 
./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## StressTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (asan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (asan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. 
- runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (msan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (ubsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (debug) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: + IntegrationTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsAnalyzerAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - 
IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ 
env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps 
--all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan, analyzer) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: 
- - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - 
clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan5: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (tsan) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - 
- name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" 
############################################################################################## ##################################### AST FUZZERS ############################################ ############################################################################################## ASTFuzzerTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (asan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (asan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (tsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (tsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestUBSan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (ubsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: 
./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (ubsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestMSan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (msan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (msan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (debug) - REPO_COPY=${{runner.temp}}/ast_fuzzer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (debug) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ############################################################################################# #################################### UNIT TESTS ############################################# ############################################################################################# UnitTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (asan) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | 
- docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (asan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsReleaseClang: needs: [BuilderBinRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (release) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (release) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (tsan) - REPO_COPY=${{runner.temp}}/unit_tests_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (tsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (msan) - REPO_COPY=${{runner.temp}}/unit_tests_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs 
--no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (msan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (ubsan) - REPO_COPY=${{runner.temp}}/unit_tests_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (ubsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" ############################################################################################# #################################### PERFORMANCE TESTS ###################################### ############################################################################################# - PerformanceComparisonX86-0: + PerformanceComparisonX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 performance_comparison_check.py "$CHECK_NAME" + PerformanceComparisonAarch: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all 
--quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-1: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-2: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-3: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison Aarch64 + runner_type: func-tester-aarch64 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 performance_comparison_check.py "$CHECK_NAME" 
############################################################################################## ###################################### SQLANCER FUZZERS ###################################### ############################################################################################## SQLancerTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (release) - REPO_COPY=${{runner.temp}}/sqlancer_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (release) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" SQLancerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (debug) - REPO_COPY=${{runner.temp}}/sqlancer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (debug) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" FinishCheck: needs: - DockerHubPush - BuilderReport - BuilderSpecialReport - MarkReleaseReady - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 - - FunctionalStatelessTestDebug3 - - FunctionalStatelessTestDebug4 + - FunctionalStatelessTestDebug - FunctionalStatelessTestRelease - FunctionalStatelessTestReleaseDatabaseOrdinary - - FunctionalStatelessTestReleaseDatabaseReplicated0 - - FunctionalStatelessTestReleaseDatabaseReplicated1 - - FunctionalStatelessTestReleaseDatabaseReplicated2 - - FunctionalStatelessTestReleaseDatabaseReplicated3 + - FunctionalStatelessTestReleaseDatabaseReplicated + - FunctionalStatelessTestReleaseAnalyzer + - FunctionalStatelessTestReleaseS3 - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestAsan2 - - FunctionalStatelessTestAsan3 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - 
FunctionalStatelessTestTsan2 - - FunctionalStatelessTestTsan3 - - FunctionalStatelessTestTsan4 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 - - FunctionalStatelessTestMsan3 - - FunctionalStatelessTestMsan4 - - FunctionalStatelessTestMsan5 - - FunctionalStatelessTestUBsan0 - - FunctionalStatelessTestUBsan1 + - FunctionalStatelessTestAsan + - FunctionalStatelessTestTsan + - FunctionalStatelessTestMsan + - FunctionalStatelessTestUBsan - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease - - FunctionalStatelessTestReleaseS3_0 - - FunctionalStatelessTestReleaseS3_1 - FunctionalStatefulTestAarch64 - FunctionalStatefulTestAsan - FunctionalStatefulTestTsan @@ -4239,32 +808,12 @@ jobs: - StressTestTsan - StressTestMsan - StressTestUBsan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - - IntegrationTestsAsan3 - - IntegrationTestsAsan4 - - IntegrationTestsAsan5 - - IntegrationTestsAnalyzerAsan0 - - IntegrationTestsAnalyzerAsan1 - - IntegrationTestsAnalyzerAsan2 - - IntegrationTestsAnalyzerAsan3 - - IntegrationTestsAnalyzerAsan4 - - IntegrationTestsAnalyzerAsan5 - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - IntegrationTestsRelease2 - - IntegrationTestsRelease3 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 - - IntegrationTestsTsan4 - - IntegrationTestsTsan5 - - PerformanceComparisonX86-0 - - PerformanceComparisonX86-1 - - PerformanceComparisonX86-2 - - PerformanceComparisonX86-3 + - IntegrationTestsAsan + - IntegrationTestsAnalyzerAsan + - IntegrationTestsTsan + - IntegrationTestsRelease + - PerformanceComparisonX86 + - PerformanceComparisonAarch - CompatibilityCheckX86 - CompatibilityCheckAarch64 - ASTFuzzerTestDebug diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 6452b83fdd6..1e94f70b9e6 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -74,9 +74,6 @@ jobs: with: name: changed_images path: ${{ runner.temp }}/changed_images.json - Codebrowser: - needs: [DockerHubPush] - uses: ./.github/workflows/woboq.yml SonarCloud: runs-on: [self-hosted, builder] env: diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 5937f434135..f8f052d9226 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -1,3 +1,4 @@ +# yamllint disable rule:comments-indentation name: PullRequestCI env: @@ -47,10 +48,10 @@ jobs: run: | cd "$GITHUB_WORKSPACE/tests/ci" echo "Testing the main ci directory" - python3 -m unittest discover -s . -p '*_test.py' + python3 -m unittest discover -s . -p 'test_*.py' for dir in *_lambda/; do echo "Testing $dir" - python3 -m unittest discover -s "$dir" -p '*_test.py' + python3 -m unittest discover -s "$dir" -p 'test_*.py' done DockerHubPushAarch64: needs: CheckLabels @@ -117,900 +118,145 @@ jobs: path: ${{ runner.temp }}/changed_images.json StyleCheck: needs: DockerHubPush - runs-on: [self-hosted, style-checker] # We need additional `&& ! cancelled()` to have the job being able to cancel if: ${{ success() || failure() || ( always() && ! 
cancelled() ) }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/fasttest - REPO_COPY=${{runner.temp}}/fasttest/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Fast Test - run: | - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 fast_test_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Fast tests + runner_type: builder + run_command: | + cd "$REPO_COPY/tests/ci" + python3 fast_test_check.py CompatibilityCheckX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check Aarch64 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility
check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### BuilderDebRelease: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 # for performance artifact - filter: tree:0 - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - BuilderBinRelease: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_release + checkout_depth: 0 BuilderDebAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/images_path - - name: Check out repository code - uses: ClickHouse/checkout@v1 - 
with: - clear-repository: true - submodules: true - fetch-depth: 0 # for performance artifact - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_aarch64 + checkout_depth: 0 + BuilderBinRelease: + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_release BuilderDebAsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_asan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_asan BuilderDebUBsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_ubsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_ubsan BuilderDebTsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_tsan BuilderDebMsan: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_msan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_msan BuilderDebDebug: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break 
build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_debug ########################################################################################## ##################################### SPECIAL BUILDS ##################################### ########################################################################################## BuilderBinClangTidy: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_tidy - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_tidy BuilderBinDarwin: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin BuilderBinAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_aarch64 BuilderBinFreeBSD: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_freebsd - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: 
./.github/workflows/reusable_build.yml + with: + build_name: binary_freebsd BuilderBinDarwinAarch64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin_aarch64 BuilderBinPPC64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_ppc64le - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_ppc64le BuilderBinAmd64Compat: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - 
BUILD_NAME=binary_amd64_compat - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_amd64_compat BuilderBinAarch64V80Compat: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_aarch64_v80compat - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_aarch64_v80compat BuilderBinRISCV64: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_riscv64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | 
xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_riscv64 BuilderBinS390X: - needs: [DockerHubPush, FastTest, StyleCheck] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_s390x - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + needs: [FastTest, StyleCheck] + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_s390x ############################################################################################ ##################################### Docker images ####################################### ############################################################################################ @@ -1043,6 +289,7 @@ jobs: ##################################### BUILD REPORTER ####################################### ############################################################################################ BuilderReport: + if: ${{ success() || failure() }} needs: - BuilderBinRelease - BuilderDebAarch64 @@ -1052,41 +299,19 @@ jobs: - BuilderDebRelease - BuilderDebTsan - BuilderDebUBsan - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: 
Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse special build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (arm64) + runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - 
CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseDatabaseReplicated: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated 
- REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, DatabaseReplicated) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseWideParts: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_wide_parts - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, wide parts enabled) - REPO_COPY=${{runner.temp}}/stateless_wide_parts/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, wide parts enabled) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd 
"$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseAnalyzer: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_analyzer - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, analyzer) - REPO_COPY=${{runner.temp}}/stateless_analyzer/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, analyzer) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseS3: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f 
||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestS3Debug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs 
--no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug5: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd 
"$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestS3Tsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (tsan, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + 
FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: 
Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out 
repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestMsan: + needs: [BuilderDebMsan] + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - 
REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan1: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" 
- cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan3: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan4: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan5: - needs: [BuilderDebMsan] - runs-on: [self-hosted, 
func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestFlakyCheck: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_flaky_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests flaky check 
(asan) - REPO_COPY=${{runner.temp}}/stateless_flaky_asan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests flaky check (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" TestsBugfixCheck: needs: [CheckLabels, StyleCheck] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/tests_bugfix_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=tests bugfix validate check - KILL_TIMEOUT=3600 - REPO_COPY=${{runner.temp}}/tests_bugfix_check/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Bugfix test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: tests bugfix validate check + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" - TEMP_PATH="${TEMP_PATH}/integration" \ - REPORTS_PATH="${REPORTS_PATH}/integration" \ - python3 integration_test_check.py "Integration $CHECK_NAME" \ - --validate-bugfix --post-commit-status=file || echo 'ignore exit code' + TEMP_PATH="${TEMP_PATH}/integration" \ + REPORTS_PATH="${REPORTS_PATH}/integration" \ + python3 integration_test_check.py "Integration $CHECK_NAME" \ + --validate-bugfix --post-commit-status=file || echo 'ignore exit code' - TEMP_PATH="${TEMP_PATH}/stateless" \ - REPORTS_PATH="${REPORTS_PATH}/stateless" \ - python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \ - --validate-bugfix --post-commit-status=file || echo 'ignore exit code' + TEMP_PATH="${TEMP_PATH}/stateless" \ + REPORTS_PATH="${REPORTS_PATH}/stateless" \ + python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \ + --validate-bugfix --post-commit-status=file || echo 'ignore exit code' - python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/functional_commit_status.tsv" "${TEMP_PATH}/integration/integration_commit_status.tsv" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/functional_commit_status.tsv" "${TEMP_PATH}/integration/integration_commit_status.tsv" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### 
############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - 
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" # Parallel replicas FunctionalStatefulTestDebugParallelReplicas: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsanParallelReplicas: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan, 
ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsanParallelReplicas: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsanParallelReplicas: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: 
./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsanParallelReplicas: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestReleaseParallelReplicas: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## StressTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_asan - REPORTS_PATH=${{runner.temp}}/reports_dir 
- CHECK_NAME=Stress test (asan) - REPO_COPY=${{runner.temp}}/stress_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (asan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (msan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (ubsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - ############################################################################################## - ######################################### UPGRADE CHECK ###################################### - ############################################################################################## + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (debug) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" +############################################################################################## +######################################### UPGRADE CHECK ###################################### +############################################################################################## UpgradeCheckAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (asan) - REPO_COPY=${{runner.temp}}/upgrade_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade check (asan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" UpgradeCheckTsan: needs: [BuilderDebTsan] - # same as for stress test with tsan - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (tsan) - REPO_COPY=${{runner.temp}}/upgrade_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade check (tsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" UpgradeCheckMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (msan) - REPO_COPY=${{runner.temp}}/upgrade_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade check (msan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" UpgradeCheckDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (debug) - REPO_COPY=${{runner.temp}}/upgrade_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade 
check (debug) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" ############################################################################################## ##################################### AST FUZZERS ############################################ ############################################################################################## ASTFuzzerTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (asan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (asan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (tsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (tsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestUBSan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (ubsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - 
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (ubsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestMSan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (msan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (msan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (debug) - REPO_COPY=${{runner.temp}}/ast_fuzzer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (debug) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: + IntegrationTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - 
sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsAnalyzerAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - 
REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" 
"$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan, analyzer) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet 
| xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan5: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download 
json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (tsan) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: 
Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" IntegrationTestsFlakyCheck: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan_flaky_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests flaky check (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan_flaky_check/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests flaky check (asan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" ############################################################################################# #################################### UNIT TESTS ############################################# ############################################################################################# UnitTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (asan) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - 
uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (asan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsReleaseClang: needs: [BuilderBinRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (release) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (release) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (tsan) - REPO_COPY=${{runner.temp}}/unit_tests_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (tsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (msan) - REPO_COPY=${{runner.temp}}/unit_tests_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository 
code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (msan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (ubsan) - REPO_COPY=${{runner.temp}}/unit_tests_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (ubsan) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" ############################################################################################# #################################### PERFORMANCE TESTS ###################################### ############################################################################################# - PerformanceComparisonX86-0: + PerformanceComparisonX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - 
RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 performance_comparison_check.py "$CHECK_NAME" + PerformanceComparisonAarch: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository 
code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-1: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-2: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-3: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - 
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison Aarch64 + runner_type: func-tester-aarch64 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 performance_comparison_check.py "$CHECK_NAME" ############################################################################################## ###################################### SQLANCER FUZZERS ###################################### ############################################################################################## SQLancerTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (release) - REPO_COPY=${{runner.temp}}/sqlancer_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (release) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" SQLancerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (debug) - REPO_COPY=${{runner.temp}}/sqlancer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################# -###################################### JEPSEN TESTS ######################################### -############################################################################################# - Jepsen: - # This is special test NOT INCLUDED in FinishCheck - # When it's skipped, all dependent tasks will be skipped too. 
- # DO NOT add it there - if: contains(github.event.pull_request.labels.*.name, 'jepsen-test') - needs: [BuilderBinRelease] - uses: ./.github/workflows/jepsen.yml + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (debug) + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" FinishCheck: needs: - StyleCheck @@ -5096,36 +978,16 @@ jobs: - BuilderReport - BuilderSpecialReport - FastTest - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 - - FunctionalStatelessTestDebug3 - - FunctionalStatelessTestDebug4 + - FunctionalStatelessTestDebug - FunctionalStatelessTestRelease - - FunctionalStatelessTestReleaseDatabaseReplicated0 - - FunctionalStatelessTestReleaseDatabaseReplicated1 - - FunctionalStatelessTestReleaseDatabaseReplicated2 - - FunctionalStatelessTestReleaseDatabaseReplicated3 + - FunctionalStatelessTestReleaseDatabaseReplicated - FunctionalStatelessTestReleaseWideParts - FunctionalStatelessTestReleaseAnalyzer - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestAsan2 - - FunctionalStatelessTestAsan3 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestTsan3 - - FunctionalStatelessTestTsan4 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 - - FunctionalStatelessTestMsan3 - - FunctionalStatelessTestMsan4 - - FunctionalStatelessTestMsan5 - - FunctionalStatelessTestUBsan0 - - FunctionalStatelessTestUBsan1 + - FunctionalStatelessTestAsan + - FunctionalStatelessTestTsan + - FunctionalStatelessTestMsan + - FunctionalStatelessTestUBsan - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease - FunctionalStatefulTestAarch64 @@ -5133,57 +995,36 @@ jobs: - FunctionalStatefulTestTsan - FunctionalStatefulTestMsan - FunctionalStatefulTestUBsan - - FunctionalStatelessTestReleaseS3_0 - - FunctionalStatelessTestReleaseS3_1 - - FunctionalStatelessTestS3Debug0 - - FunctionalStatelessTestS3Debug1 - - FunctionalStatelessTestS3Debug2 - - FunctionalStatelessTestS3Debug4 - - FunctionalStatelessTestS3Debug5 - - FunctionalStatelessTestS3Tsan0 - - FunctionalStatelessTestS3Tsan1 - - FunctionalStatelessTestS3Tsan2 - - FunctionalStatelessTestS3Tsan4 + - FunctionalStatelessTestReleaseS3 + - FunctionalStatelessTestS3Debug + - FunctionalStatelessTestS3Tsan + - FunctionalStatefulTestReleaseParallelReplicas + - FunctionalStatefulTestAsanParallelReplicas + - FunctionalStatefulTestTsanParallelReplicas + - FunctionalStatefulTestMsanParallelReplicas + - FunctionalStatefulTestUBsanParallelReplicas + - FunctionalStatefulTestDebugParallelReplicas - StressTestDebug - StressTestAsan - StressTestTsan - StressTestMsan - StressTestUBsan + - UpgradeCheckAsan + - UpgradeCheckTsan + - UpgradeCheckMsan + - UpgradeCheckDebug - ASTFuzzerTestDebug - ASTFuzzerTestAsan - ASTFuzzerTestTsan - ASTFuzzerTestMSan - ASTFuzzerTestUBSan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - - IntegrationTestsAsan3 - - IntegrationTestsAsan4 - - IntegrationTestsAsan5 - - IntegrationTestsAnalyzerAsan0 - - IntegrationTestsAnalyzerAsan1 - - IntegrationTestsAnalyzerAsan2 - - IntegrationTestsAnalyzerAsan3 - - IntegrationTestsAnalyzerAsan4 - - IntegrationTestsAnalyzerAsan5 - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - IntegrationTestsRelease2 - - IntegrationTestsRelease3 - - 
IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 - - IntegrationTestsTsan4 - - IntegrationTestsTsan5 - - PerformanceComparisonX86-0 - - PerformanceComparisonX86-1 - - PerformanceComparisonX86-2 - - PerformanceComparisonX86-3 - - PerformanceComparisonAarch-0 - - PerformanceComparisonAarch-1 - - PerformanceComparisonAarch-2 - - PerformanceComparisonAarch-3 + - IntegrationTestsAsan + - IntegrationTestsAnalyzerAsan + - IntegrationTestsTsan + - IntegrationTestsRelease + - IntegrationTestsFlakyCheck + - PerformanceComparisonX86 + - PerformanceComparisonAarch - UnitTestsAsan - UnitTestsTsan - UnitTestsMsan @@ -5191,7 +1032,6 @@ jobs: - UnitTestsReleaseClang - CompatibilityCheckX86 - CompatibilityCheckAarch64 - - IntegrationTestsFlakyCheck - SQLancerTestRelease - SQLancerTestDebug runs-on: [self-hosted, style-checker] @@ -5205,6 +1045,44 @@ jobs: cd "$GITHUB_WORKSPACE/tests/ci" python3 finish_check.py python3 merge_pr.py --check-approved +############################################################################################## +############################ SQLLOGIC TEST ################################################### +############################################################################################## + SQLLogicTestRelease: + needs: [BuilderDebRelease] + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Sqllogic test (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqllogic_test.py "$CHECK_NAME" "$KILL_TIMEOUT" +############################################################################################## +##################################### SQL TEST ############################################### +############################################################################################## + SQLTest: + needs: [BuilderDebRelease] + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLTest + runner_type: fuzzer-unit-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqltest.py "$CHECK_NAME" +############################################################################################# +###################################### NOT IN FINISH ######################################## +############################################################################################# +###################################### JEPSEN TESTS ######################################### +############################################################################################# + Jepsen: + # This is special test NOT INCLUDED in FinishCheck + # When it's skipped, all dependent tasks will be skipped too. 
+ # DO NOT add it there + if: contains(github.event.pull_request.labels.*.name, 'jepsen-test') + needs: [BuilderBinRelease] + uses: ./.github/workflows/jepsen.yml ############################################################################################# ####################################### libFuzzer ########################################### ############################################################################################# @@ -5212,77 +1090,3 @@ jobs: if: contains(github.event.pull_request.labels.*.name, 'libFuzzer') needs: [DockerHubPush, StyleCheck] uses: ./.github/workflows/libfuzzer.yml - ############################################################################################## - ############################ SQLLOGIC TEST ################################################### - ############################################################################################## - SQLLogicTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqllogic_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Sqllogic test (release) - REPO_COPY=${{runner.temp}}/sqllogic_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Sqllogic test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqllogic_test.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -##################################### SQL TEST ############################################### -############################################################################################## - SQLTest: - needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqltest - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLTest - REPO_COPY=${{runner.temp}}/sqltest/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLTest - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqltest.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 5dd837c6456..b5771fa87ab 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -1,3 +1,4 @@ +# yamllint disable rule:comments-indentation name: ReleaseBranchCI env: @@ -75,466 +76,74 @@ jobs: path: ${{ runner.temp }}/changed_images.json CompatibilityCheckX86: needs: 
[BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check Aarch64 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### BuilderDebRelease: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_release - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_release + checkout_depth: 0 BuilderDebAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ runner.temp }}/images_path - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # For a proper version and performance artifacts - filter: tree:0 - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ runner.temp }}/build_check/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_aarch64 + checkout_depth: 0 BuilderDebAsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_asan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_asan BuilderDebUBsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - 
REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_ubsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_ubsan BuilderDebTsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_tsan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_tsan BuilderDebMsan: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_msan - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" 
"$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_msan BuilderDebDebug: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=package_debug - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: package_debug BuilderBinDarwin: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin + checkout_depth: 0 BuilderBinDarwinAarch64: needs: [DockerHubPush] - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=binary_darwin_aarch64 - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: 0 # otherwise we will have no info about contributors - filter: tree:0 - - name: Apply sparse checkout for contrib # in order to check that it doesn't break build - run: | - rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' - git -C "$GITHUB_WORKSPACE" checkout . 
&& echo 'restored' - "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' - du -hs "$GITHUB_WORKSPACE/contrib" ||: - find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: binary_darwin_aarch64 + checkout_depth: 0 ############################################################################################ ##################################### Docker images ####################################### ############################################################################################ @@ -567,6 +176,7 @@ jobs: ##################################### BUILD REPORTER ####################################### ############################################################################################ BuilderReport: + if: ${{ success() || failure() }} needs: - BuilderDebRelease - BuilderDebAarch64 @@ -575,79 +185,33 @@ jobs: - BuilderDebUBsan - BuilderDebMsan - BuilderDebDebug - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse special build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Install packages (arm64) + runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=10800 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - 
python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps 
--quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd 
"$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - 
if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} 
- - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir 
- CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" 
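The stateless-test conversions above collapse what used to be separate per-batch job definitions (for example the removed FunctionalStatelessTestAsan0/1 and FunctionalStatelessTestMsan0/1/2) into a single caller that passes a `batches:` count; the reusable workflow introduced later in this diff expands that count into a matrix and exports `RUN_BY_HASH_NUM` and `RUN_BY_HASH_TOTAL` to each batch. As a minimal sketch, assuming a check script in the spirit of `functional_test_check.py` reads those two variables, batch selection could look like the following; the crc32-based split and the helper name are illustrative assumptions, not the actual ClickHouse implementation.

```python
import os
import zlib


def tests_for_this_batch(all_tests):
    """Pick the subset of tests that belongs to the current batch.

    RUN_BY_HASH_NUM / RUN_BY_HASH_TOTAL are exported by the reusable
    workflow's "Setup batch" step only when batches > 1; without them
    every test falls into the single batch 0 of 1.
    """
    num = int(os.environ.get("RUN_BY_HASH_NUM", 0))
    total = int(os.environ.get("RUN_BY_HASH_TOTAL", 1))
    # A stable hash keeps each test in the same batch across runs,
    # independent of discovery order, so the batches stay disjoint.
    return [t for t in all_tests if zlib.crc32(t.encode()) % total == num]


if __name__ == "__main__":
    suite = [f"02{i:03d}_example_test" for i in range(20)]
    mine = tests_for_this_batch(suite)
    print(f"{os.environ.get('CHECK_NAME', 'local run')}: "
          f"running {len(mine)} of {len(suite)} tests")
```

With `batches: 4` in the caller, four matrix entries each see a different `RUN_BY_HASH_NUM` against the same `RUN_BY_HASH_TOTAL=4`, so the batches are disjoint and together cover the whole suite exactly once.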
############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## StressTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (asan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (asan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (msan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (ubsan) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (debug) + runner_type: stress-tester + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: + IntegrationTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo 
rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsAnalyzerAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan, analyzer) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration 
test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests 
(tsan) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" FinishCheck: needs: - DockerHubPush @@ -1972,19 +512,12 @@ jobs: - BuilderReport - BuilderSpecialReport - MarkReleaseReady - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 + - FunctionalStatelessTestDebug - FunctionalStatelessTestRelease - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 + - FunctionalStatelessTestAsan + - FunctionalStatelessTestTsan + - FunctionalStatelessTestMsan - FunctionalStatelessTestUBsan - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease @@ -1998,15 +531,9 @@ jobs: - StressTestTsan - StressTestMsan - StressTestUBsan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 + - 
IntegrationTestsAsan + - IntegrationTestsTsan + - IntegrationTestsRelease - CompatibilityCheckX86 - CompatibilityCheckAarch64 runs-on: [self-hosted, style-checker] diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml new file mode 100644 index 00000000000..f36b93bea58 --- /dev/null +++ b/.github/workflows/reusable_build.yml @@ -0,0 +1,79 @@ +### For the pure soul wishes to move it to another place +# https://github.com/orgs/community/discussions/9050 + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + +name: Build ClickHouse +'on': + workflow_call: + inputs: + build_name: + description: the value of build type from tests/ci/ci_config.py + required: true + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + runner_type: + description: the label of runner to use + default: builder + type: string + additional_envs: + description: additional ENV variables to setup the job + type: string + +jobs: + Build: + name: Build-${{inputs.build_name}} + env: + GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}} + runs-on: [self-hosted, '${{inputs.runner_type}}'] + steps: + - name: Check out repository code + uses: ClickHouse/checkout@v1 + with: + clear-repository: true + submodules: true + fetch-depth: ${{inputs.checkout_depth}} + filter: tree:0 + - name: Set build envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + ${{inputs.additional_envs}} + EOF + python3 "$GITHUB_WORKSPACE"/tests/ci/ci_config.py --build-name "${{inputs.build_name}}" >> "$GITHUB_ENV" + - name: Apply sparse checkout for contrib # in order to check that it doesn't break build + # This step is done in GITHUB_WORKSPACE, + # because it's broken in REPO_COPY for some reason + if: ${{ env.BUILD_SPARSE_CHECKOUT == 'true' }} + run: | + rm -rf "$GITHUB_WORKSPACE/contrib" && echo 'removed' + git -C "$GITHUB_WORKSPACE" checkout . && echo 'restored' + "$GITHUB_WORKSPACE/contrib/update-submodules.sh" && echo 'OK' + du -hs "$GITHUB_WORKSPACE/contrib" ||: + find "$GITHUB_WORKSPACE/contrib" -type f | wc -l ||: + - name: Common setup + uses: ./.github/actions/common_setup + with: + job_type: build_check + - name: Download changed images + uses: actions/download-artifact@v3 + with: + name: changed_images + path: ${{ env.IMAGES_PATH }} + - name: Build + run: | + cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" + - name: Upload build URLs to artifacts + if: ${{ success() || failure() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ env.BUILD_URLS }} + path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + - name: Clean + if: always() + uses: ./.github/actions/clean diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml new file mode 100644 index 00000000000..e82d2d51596 --- /dev/null +++ b/.github/workflows/reusable_test.yml @@ -0,0 +1,113 @@ +### For the pure soul wishes to move it to another place +# https://github.com/orgs/community/discussions/9050 + +name: Testing workflow +'on': + workflow_call: + inputs: + test_name: + description: the value of test type from tests/ci/ci_config.py, ends up as $CHECK_NAME ENV + required: true + type: string + runner_type: + description: the label of runner to use + required: true + type: string + run_command: + description: the command to launch the check. 
Usually starts with `cd '$REPO_COPY/tests/ci'` + required: true + type: string + batches: + description: how many batches for the test will be launched + default: 1 + type: number + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + secrets: + secret_envs: + description: if given, it's passed to the environments + required: false + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + CHECK_NAME: ${{inputs.test_name}} + +jobs: + PrepareStrategy: + # batches < 1 is misconfiguration, + # and we need this step only for batches > 1 + if: ${{ inputs.batches > 1 }} + runs-on: [self-hosted, style-checker-aarch64] + outputs: + batches: ${{steps.batches.outputs.batches}} + steps: + - name: Calculate batches + id: batches + run: | + batches_output=$(python3 -c 'import json; print(json.dumps(list(range(${{inputs.batches}}))))') + echo "batches=${batches_output}" >> "$GITHUB_OUTPUT" + Test: + # If PrepareStrategy is skipped for batches == 1, + # we still need to launch the test. + # `! failure()` is mandatory here to launch on skipped Job + # `&& !cancelled()` to allow the be cancelable + if: ${{ ( !failure() && !cancelled() ) && inputs.batches > 0 }} + # Do not add `-0` to the end, if there's only one batch + name: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} + env: + GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} + runs-on: [self-hosted, '${{inputs.runner_type}}'] + needs: [PrepareStrategy] + strategy: + fail-fast: false # we always wait for entire matrix + matrix: + # if PrepareStrategy does not have batches, we use 0 + batch: ${{ needs.PrepareStrategy.outputs.batches + && fromJson(needs.PrepareStrategy.outputs.batches) + || fromJson('[0]')}} + steps: + - name: Check out repository code + uses: ClickHouse/checkout@v1 + with: + clear-repository: true + submodules: ${{inputs.submodules}} + fetch-depth: ${{inputs.checkout_depth}} + filter: tree:0 + - name: Set build envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + ${{inputs.additional_envs}} + ${{secrets.secret_envs}} + EOF + - name: Common setup + uses: ./.github/actions/common_setup + with: + job_type: test + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Setup batch + if: ${{ inputs.batches > 1}} + run: | + cat >> "$GITHUB_ENV" << 'EOF' + RUN_BY_HASH_NUM=${{matrix.batch}} + RUN_BY_HASH_TOTAL=${{inputs.batches}} + EOF + - name: Run test + run: ${{inputs.run_command}} + - name: Clean + if: always() + uses: ./.github/actions/clean diff --git a/.github/workflows/woboq.yml b/.github/workflows/woboq.yml deleted file mode 100644 index 1ef729af30a..00000000000 --- a/.github/workflows/woboq.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: WoboqBuilder -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -concurrency: - group: woboq -on: # yamllint disable-line rule:truthy - workflow_dispatch: - workflow_call: -jobs: - # don't use dockerhub push because this image updates so rarely - WoboqCodebrowser: - runs-on: [self-hosted, style-checker] - timeout-minutes: 420 # the task is pretty heavy, so there's an additional hour - steps: - - name: Set envs - run: | - cat 
>> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/codebrowser - REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse - IMAGES_PATH=${{runner.temp}}/images_path - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: 'true' - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.IMAGES_PATH }} - - name: Codebrowser - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" diff --git a/.gitmodules b/.gitmodules index 904d2cec249..af90c788012 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,6 @@ +# Please do not use 'branch = ...' tags with submodule entries. Such tags make updating submodules a +# little bit more convenient but they do *not* specify the tracked submodule branch. Thus, they are +# more confusing than useful. [submodule "contrib/zstd"] path = contrib/zstd url = https://github.com/facebook/zstd @@ -351,3 +354,6 @@ [submodule "contrib/aklomp-base64"] path = contrib/aklomp-base64 url = https://github.com/aklomp/base64.git +[submodule "contrib/pocketfft"] + path = contrib/pocketfft + url = https://github.com/mreineck/pocketfft.git diff --git a/CHANGELOG.md b/CHANGELOG.md index e95daca2a46..aa40012617c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ ### Table of Contents +**[ClickHouse release v23.10, 2023-11-02](#2310)**
**[ClickHouse release v23.9, 2023-09-28](#239)**
**[ClickHouse release v23.8 LTS, 2023-08-31](#238)**
**[ClickHouse release v23.7, 2023-07-27](#237)**
@@ -12,6 +13,184 @@ # 2023 Changelog +### ClickHouse release 23.10, 2023-11-02 + +#### Backward Incompatible Change +* There is no longer an option to automatically remove broken data parts. This closes [#55174](https://github.com/ClickHouse/ClickHouse/issues/55174). [#55184](https://github.com/ClickHouse/ClickHouse/pull/55184) ([Alexey Milovidov](https://github.com/alexey-milovidov)). [#55557](https://github.com/ClickHouse/ClickHouse/pull/55557) ([Jihyuk Bok](https://github.com/tomahawk28)). +* The obsolete in-memory data parts can no longer be read from the write-ahead log. If you have configured in-memory parts before, they have to be removed before the upgrade. [#55186](https://github.com/ClickHouse/ClickHouse/pull/55186) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Remove the integration with Meilisearch. Reason: it was compatible only with the old version 0.18. The recent version of Meilisearch changed the protocol and does not work anymore. Note: we would appreciate it if you help to return it back. [#55189](https://github.com/ClickHouse/ClickHouse/pull/55189) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Rename directory monitor concept into background INSERT. All the settings `*directory_monitor*` had been renamed to `distributed_background_insert*`. *Backward compatibility should be preserved* (since old settings had been added as an alias). [#55978](https://github.com/ClickHouse/ClickHouse/pull/55978) ([Azat Khuzhin](https://github.com/azat)). +* Do not interpret the `send_timeout` set on the client side as the `receive_timeout` on the server side and vise-versa. [#56035](https://github.com/ClickHouse/ClickHouse/pull/56035) ([Azat Khuzhin](https://github.com/azat)). +* Comparison of time intervals with different units will throw an exception. This closes [#55942](https://github.com/ClickHouse/ClickHouse/issues/55942). You might have occasionally rely on the previous behavior when the underlying numeric values were compared regardless of the units. [#56090](https://github.com/ClickHouse/ClickHouse/pull/56090) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Rewrited the experimental `S3Queue` table engine completely: changed the way we keep information in zookeeper which allows to make less zookeeper requests, added caching of zookeeper state in cases when we know the state will not change, improved the polling from s3 process to make it less aggressive, changed the way ttl and max set for trached files is maintained, now it is a background process. Added `system.s3queue` and `system.s3queue_log` tables. Closes [#54998](https://github.com/ClickHouse/ClickHouse/issues/54998). [#54422](https://github.com/ClickHouse/ClickHouse/pull/54422) ([Kseniia Sumarokova](https://github.com/kssenii)). + +#### New Feature +* Add function `arrayFold(accumulator, x1, ..., xn -> expression, initial, array1, ..., arrayn)` which applies a lambda function to multiple arrays of the same cardinality and collects the result in an accumulator. [#49794](https://github.com/ClickHouse/ClickHouse/pull/49794) ([Lirikl](https://github.com/Lirikl)). +* Support for `Npy` format. `SELECT * FROM file('example_array.npy', Npy)`. [#55982](https://github.com/ClickHouse/ClickHouse/pull/55982) ([Yarik Briukhovetskyi](https://github.com/yariks5s)). +* If a table has a space-filling curve in its key, e.g., `ORDER BY mortonEncode(x, y)`, the conditions on its arguments, e.g., `x >= 10 AND x <= 20 AND y >= 20 AND y <= 30` can be used for indexing. 
A setting `analyze_index_with_space_filling_curves` is added to enable or disable this analysis. This closes [#41195](https://github.com/ClickHouse/ClickHouse/issue/41195). Continuation of [#4538](https://github.com/ClickHouse/ClickHouse/pull/4538). Continuation of [#6286](https://github.com/ClickHouse/ClickHouse/pull/6286). Continuation of [#28130](https://github.com/ClickHouse/ClickHouse/pull/28130). Continuation of [#41753](https://github.com/ClickHouse/ClickHouse/pull/#41753). [#55642](https://github.com/ClickHouse/ClickHouse/pull/55642) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* A new setting called `force_optimize_projection_name`, it takes a name of projection as an argument. If it's value set to a non-empty string, ClickHouse checks that this projection is used in the query at least once. Closes [#55331](https://github.com/ClickHouse/ClickHouse/issues/55331). [#56134](https://github.com/ClickHouse/ClickHouse/pull/56134) ([Yarik Briukhovetskyi](https://github.com/yariks5s)). +* Support asynchronous inserts with external data via native protocol. Previously it worked only if data is inlined into query. [#54730](https://github.com/ClickHouse/ClickHouse/pull/54730) ([Anton Popov](https://github.com/CurtizJ)). +* Added aggregation function `lttb` which uses the [Largest-Triangle-Three-Buckets](https://skemman.is/bitstream/1946/15343/3/SS_MSthesis.pdf) algorithm for downsampling data for visualization. [#53145](https://github.com/ClickHouse/ClickHouse/pull/53145) ([Sinan](https://github.com/sinsinan)). +* Query`CHECK TABLE` has better performance and usability (sends progress updates, cancellable). Support checking particular part with `CHECK TABLE ... PART 'part_name'`. [#53404](https://github.com/ClickHouse/ClickHouse/pull/53404) ([vdimir](https://github.com/vdimir)). +* Added function `jsonMergePatch`. When working with JSON data as strings, it provides a way to merge these strings (of JSON objects) together to form a single string containing a single JSON object. [#54364](https://github.com/ClickHouse/ClickHouse/pull/54364) ([Memo](https://github.com/Joeywzr)). +* The second part of Kusto Query Language dialect support. [Phase 1 implementation ](https://github.com/ClickHouse/ClickHouse/pull/37961) has been merged. [#42510](https://github.com/ClickHouse/ClickHouse/pull/42510) ([larryluogit](https://github.com/larryluogit)). +* Added a new SQL function, `arrayRandomSample(arr, k)` which returns a sample of k elements from the input array. Similar functionality could previously be achieved only with less convenient syntax, e.g. "SELECT arrayReduce('groupArraySample(3)', range(10))". [#54391](https://github.com/ClickHouse/ClickHouse/pull/54391) ([itayisraelov](https://github.com/itayisraelov)). +* Introduce `-ArgMin`/`-ArgMax` aggregate combinators which allow to aggregate by min/max values only. One use case can be found in [#54818](https://github.com/ClickHouse/ClickHouse/issues/54818). This PR also reorganize combinators into dedicated folder. [#54947](https://github.com/ClickHouse/ClickHouse/pull/54947) ([Amos Bird](https://github.com/amosbird)). +* Allow to drop cache for Protobuf format with `SYSTEM DROP SCHEMA FORMAT CACHE [FOR Protobuf]`. [#55064](https://github.com/ClickHouse/ClickHouse/pull/55064) ([Aleksandr Musorin](https://github.com/AVMusorin)). +* Add external HTTP Basic authenticator. [#55199](https://github.com/ClickHouse/ClickHouse/pull/55199) ([Aleksei Filatov](https://github.com/aalexfvk)). 
+* Added function `byteSwap` which reverses the bytes of unsigned integers. This is particularly useful for reversing values of types which are represented as unsigned integers internally such as IPv4. [#55211](https://github.com/ClickHouse/ClickHouse/pull/55211) ([Priyansh Agrawal](https://github.com/Priyansh121096)). +* Added function `formatQuery()` which returns a formatted version (possibly spanning multiple lines) of a SQL query string. Also added function `formatQuerySingleLine()` which does the same but the returned string will not contain linebreaks. [#55239](https://github.com/ClickHouse/ClickHouse/pull/55239) ([Salvatore Mesoraca](https://github.com/aiven-sal)). +* Added `DWARF` input format that reads debug symbols from an ELF executable/library/object file. [#55450](https://github.com/ClickHouse/ClickHouse/pull/55450) ([Michael Kolupaev](https://github.com/al13n321)). +* Allow to save unparsed records and errors in RabbitMQ, NATS and FileLog engines. Add virtual columns `_error` and `_raw_message`(for NATS and RabbitMQ), `_raw_record` (for FileLog) that are filled when ClickHouse fails to parse new record. The behaviour is controlled under storage settings `nats_handle_error_mode` for NATS, `rabbitmq_handle_error_mode` for RabbitMQ, `handle_error_mode` for FileLog similar to `kafka_handle_error_mode`. If it's set to `default`, en exception will be thrown when ClickHouse fails to parse a record, if it's set to `stream`, erorr and raw record will be saved into virtual columns. Closes [#36035](https://github.com/ClickHouse/ClickHouse/issues/36035). [#55477](https://github.com/ClickHouse/ClickHouse/pull/55477) ([Kruglov Pavel](https://github.com/Avogar)). +* Keeper client improvement: add `get_all_children_number command` that returns number of all children nodes under a specific path. [#55485](https://github.com/ClickHouse/ClickHouse/pull/55485) ([guoxiaolong](https://github.com/guoxiaolongzte)). +* Keeper client improvement: add `get_direct_children_number` command that returns number of direct children nodes under a path. [#55898](https://github.com/ClickHouse/ClickHouse/pull/55898) ([xuzifu666](https://github.com/xuzifu666)). +* Add statement `SHOW SETTING setting_name` which is a simpler version of existing statement `SHOW SETTINGS`. [#55979](https://github.com/ClickHouse/ClickHouse/pull/55979) ([Maksim Kita](https://github.com/kitaisreal)). +* Added fields `substreams` and `filenames` to the `system.parts_columns` table. [#55108](https://github.com/ClickHouse/ClickHouse/pull/55108) ([Anton Popov](https://github.com/CurtizJ)). +* Add support for `SHOW MERGES` query. [#55815](https://github.com/ClickHouse/ClickHouse/pull/55815) ([megao](https://github.com/jetgm)). +* Introduce a setting `create_table_empty_primary_key_by_default` for default `ORDER BY ()`. [#55899](https://github.com/ClickHouse/ClickHouse/pull/55899) ([Srikanth Chekuri](https://github.com/srikanthccv)). + +#### Performance Improvement +* Add option `query_plan_preserve_num_streams_after_window_functions` to preserve the number of streams after evaluating window functions to allow parallel stream processing. [#50771](https://github.com/ClickHouse/ClickHouse/pull/50771) ([frinkr](https://github.com/frinkr)). +* Release more streams if data is small. [#53867](https://github.com/ClickHouse/ClickHouse/pull/53867) ([Jiebin Sun](https://github.com/jiebinn)). +* RoaringBitmaps being optimized before serialization. 
[#55044](https://github.com/ClickHouse/ClickHouse/pull/55044) ([UnamedRus](https://github.com/UnamedRus)). +* Posting lists in inverted indexes are now optimized to use the smallest possible representation for internal bitmaps. Depending on the repetitiveness of the data, this may significantly reduce the space consumption of inverted indexes. [#55069](https://github.com/ClickHouse/ClickHouse/pull/55069) ([Harry Lee](https://github.com/HarryLeeIBM)). +* Fix contention on Context lock, this significantly improves performance for a lot of short-running concurrent queries. [#55121](https://github.com/ClickHouse/ClickHouse/pull/55121) ([Maksim Kita](https://github.com/kitaisreal)). +* Improved the performance of inverted index creation by 30%. This was achieved by replacing `std::unordered_map` with `absl::flat_hash_map`. [#55210](https://github.com/ClickHouse/ClickHouse/pull/55210) ([Harry Lee](https://github.com/HarryLeeIBM)). +* Support ORC filter push down (rowgroup level). [#55330](https://github.com/ClickHouse/ClickHouse/pull/55330) ([李扬](https://github.com/taiyang-li)). +* Improve performance of external aggregation with a lot of temporary files. [#55489](https://github.com/ClickHouse/ClickHouse/pull/55489) ([Maksim Kita](https://github.com/kitaisreal)). +* Set a reasonable size for the marks cache for secondary indices by default to avoid loading the marks over and over again. [#55654](https://github.com/ClickHouse/ClickHouse/pull/55654) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Avoid unnecessary reconstruction of index granules when reading skip indexes. This addresses [#55653](https://github.com/ClickHouse/ClickHouse/issues/55653#issuecomment-1763766009). [#55683](https://github.com/ClickHouse/ClickHouse/pull/55683) ([Amos Bird](https://github.com/amosbird)). +* Cache CAST function in set during execution to improve the performance of function `IN` when set element type doesn't exactly match column type. [#55712](https://github.com/ClickHouse/ClickHouse/pull/55712) ([Duc Canh Le](https://github.com/canhld94)). +* Performance improvement for `ColumnVector::insertMany` and `ColumnVector::insertManyFrom`. [#55714](https://github.com/ClickHouse/ClickHouse/pull/55714) ([frinkr](https://github.com/frinkr)). +* Optimized Map subscript operations by predicting the next row's key position and reduce the comparisons. [#55929](https://github.com/ClickHouse/ClickHouse/pull/55929) ([lgbo](https://github.com/lgbo-ustc)). +* Support struct fields pruning in Parquet (in previous versions it didn't work in some cases). [#56117](https://github.com/ClickHouse/ClickHouse/pull/56117) ([lgbo](https://github.com/lgbo-ustc)). +* Add the ability to tune the number of parallel replicas used in a query execution based on the estimation of rows to read. [#51692](https://github.com/ClickHouse/ClickHouse/pull/51692) ([Raúl Marín](https://github.com/Algunenano)). +* Optimized external aggregation memory consumption in case many temporary files were generated. [#54798](https://github.com/ClickHouse/ClickHouse/pull/54798) ([Nikita Taranov](https://github.com/nickitat)). +* Distributed queries executed in `async_socket_for_remote` mode (default) now respect `max_threads` limit. Previously, some queries could create excessive threads (up to `max_distributed_connections`), causing server performance issues. [#53504](https://github.com/ClickHouse/ClickHouse/pull/53504) ([filimonov](https://github.com/filimonov)). +* Caching skip-able entries while executing DDL from Zookeeper distributed DDL queue. 
[#54828](https://github.com/ClickHouse/ClickHouse/pull/54828) ([Duc Canh Le](https://github.com/canhld94)). +* Experimental inverted indexes do not store tokens with too many matches (i.e. row ids in the posting list). This saves space and avoids ineffective index lookups when sequential scans would be equally fast or faster. The previous heuristics (`density` parameter passed to the index definition) that controlled when tokens would not be stored was too confusing for users. A much simpler heuristics based on parameter `max_rows_per_postings_list` (default: 64k) is introduced which directly controls the maximum allowed number of row ids in a postings list. [#55616](https://github.com/ClickHouse/ClickHouse/pull/55616) ([Harry Lee](https://github.com/HarryLeeIBM)). +* Improve write performance to `EmbeddedRocksDB` tables. [#55732](https://github.com/ClickHouse/ClickHouse/pull/55732) ([Duc Canh Le](https://github.com/canhld94)). +* Improved overall resilience for ClickHouse in case of many parts within partition (more than 1000). It might reduce the number of `TOO_MANY_PARTS` errors. [#55526](https://github.com/ClickHouse/ClickHouse/pull/55526) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Reduced memory consumption during loading of hierarchical dictionaries. [#55838](https://github.com/ClickHouse/ClickHouse/pull/55838) ([Nikita Taranov](https://github.com/nickitat)). +* All dictionaries support setting `dictionary_use_async_executor`. [#55839](https://github.com/ClickHouse/ClickHouse/pull/55839) ([vdimir](https://github.com/vdimir)). +* Prevent excesive memory usage when deserializing AggregateFunctionTopKGenericData. [#55947](https://github.com/ClickHouse/ClickHouse/pull/55947) ([Raúl Marín](https://github.com/Algunenano)). +* On a Keeper with lots of watches AsyncMetrics threads can consume 100% of CPU for noticable time in `DB::KeeperStorage::getSessionsWithWatchesCount()`. The fix is to avoid traversing heavy `watches` and `list_watches` sets. [#56054](https://github.com/ClickHouse/ClickHouse/pull/56054) ([Alexander Gololobov](https://github.com/davenger)). +* Add setting `optimize_trivial_approximate_count_query` to use `count()` approximation for storage EmbeddedRocksDB. Enable trivial count for StorageJoin. [#55806](https://github.com/ClickHouse/ClickHouse/pull/55806) ([Duc Canh Le](https://github.com/canhld94)). + +#### Improvement +* Functions `toDayOfWeek()` (MySQL alias: `DAYOFWEEK()`), `toYearWeek()` (`YEARWEEK()`) and `toWeek()` (`WEEK()`) now supports `String` arguments. This makes its behavior consistent with MySQL's behavior. [#55589](https://github.com/ClickHouse/ClickHouse/pull/55589) ([Robert Schulze](https://github.com/rschu1ze)). +* Introduced setting `date_time_overflow_behavior` with possible values `ignore`, `throw`, `saturate` that controls the overflow behavior when converting from Date, Date32, DateTime64, Integer or Float to Date, Date32, DateTime or DateTime64. [#55696](https://github.com/ClickHouse/ClickHouse/pull/55696) ([Andrey Zvonov](https://github.com/zvonand)). +* Implement query parameters support for `ALTER TABLE ... ACTION PARTITION [ID] {parameter_name:ParameterType}`. Merges [#49516](https://github.com/ClickHouse/ClickHouse/issues/49516). Closes [#49449](https://github.com/ClickHouse/ClickHouse/issues/49449). [#55604](https://github.com/ClickHouse/ClickHouse/pull/55604) ([alesapin](https://github.com/alesapin)). +* Print processor ids in a prettier manner in EXPLAIN. 
[#48852](https://github.com/ClickHouse/ClickHouse/pull/48852) ([Vlad Seliverstov](https://github.com/behebot)). +* Creating a direct dictionary with a lifetime field will be rejected at create time (as the lifetime does not make sense for direct dictionaries). Fixes: [#27861](https://github.com/ClickHouse/ClickHouse/issues/27861). [#49043](https://github.com/ClickHouse/ClickHouse/pull/49043) ([Rory Crispin](https://github.com/RoryCrispin)). +* Allow parameters in queries with partitions like `ALTER TABLE t DROP PARTITION`. Closes [#49449](https://github.com/ClickHouse/ClickHouse/issues/49449). [#49516](https://github.com/ClickHouse/ClickHouse/pull/49516) ([Nikolay Degterinsky](https://github.com/evillique)). +* Add a new column `xid` for `system.zookeeper_connection`. [#50702](https://github.com/ClickHouse/ClickHouse/pull/50702) ([helifu](https://github.com/helifu)). +* Display the correct server settings in `system.server_settings` after configuration reload. [#53774](https://github.com/ClickHouse/ClickHouse/pull/53774) ([helifu](https://github.com/helifu)). +* Add support for mathematical minus `−` character in queries, similar to `-`. [#54100](https://github.com/ClickHouse/ClickHouse/pull/54100) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Add replica groups to the experimental `Replicated` database engine. Closes [#53620](https://github.com/ClickHouse/ClickHouse/issues/53620). [#54421](https://github.com/ClickHouse/ClickHouse/pull/54421) ([Nikolay Degterinsky](https://github.com/evillique)). +* It is better to retry retriable s3 errors than totally fail the query. Set bigger value to the s3_retry_attempts by default. [#54770](https://github.com/ClickHouse/ClickHouse/pull/54770) ([Sema Checherinda](https://github.com/CheSema)). +* Add load balancing mode `hostname_levenshtein_distance`. [#54826](https://github.com/ClickHouse/ClickHouse/pull/54826) ([JackyWoo](https://github.com/JackyWoo)). +* Improve hiding secrets in logs. [#55089](https://github.com/ClickHouse/ClickHouse/pull/55089) ([Vitaly Baranov](https://github.com/vitlibar)). +* For now the projection analysis will be performed only on top of query plan. The setting `query_plan_optimize_projection` became obsolete (it was enabled by default long time ago). [#55112](https://github.com/ClickHouse/ClickHouse/pull/55112) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* When function `untuple` is now called on a tuple with named elements and itself has an alias (e.g. `select untuple(tuple(1)::Tuple(element_alias Int)) AS untuple_alias`), then the result column name is now generated from the untuple alias and the tuple element alias (in the example: "untuple_alias.element_alias"). [#55123](https://github.com/ClickHouse/ClickHouse/pull/55123) ([garcher22](https://github.com/garcher22)). +* Added setting `describe_include_virtual_columns`, which allows to include virtual columns of table into result of `DESCRIBE` query. Added setting `describe_compact_output`. If it is set to `true`, `DESCRIBE` query returns only names and types of columns without extra information. [#55129](https://github.com/ClickHouse/ClickHouse/pull/55129) ([Anton Popov](https://github.com/CurtizJ)). +* Sometimes `OPTIMIZE` with `optimize_throw_if_noop=1` may fail with an error `unknown reason` while the real cause of it - different projections in different parts. This behavior is fixed. [#55130](https://github.com/ClickHouse/ClickHouse/pull/55130) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). 
+* Allow to have several `MaterializedPostgreSQL` tables following the same Postgres table. By default this behaviour is not enabled (for compatibility, because it is a backward-incompatible change), but can be turned on with setting `materialized_postgresql_use_unique_replication_consumer_identifier`. Closes [#54918](https://github.com/ClickHouse/ClickHouse/issues/54918). [#55145](https://github.com/ClickHouse/ClickHouse/pull/55145) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Allow to parse negative `DateTime64` and `DateTime` with fractional part from short strings. [#55146](https://github.com/ClickHouse/ClickHouse/pull/55146) ([Andrey Zvonov](https://github.com/zvonand)). +* To improve compatibility with MySQL, 1. `information_schema.tables` now includes the new field `table_rows`, and 2. `information_schema.columns` now includes the new field `extra`. [#55215](https://github.com/ClickHouse/ClickHouse/pull/55215) ([Robert Schulze](https://github.com/rschu1ze)). +* Clickhouse-client won't show "0 rows in set" if it is zero and if exception was thrown. [#55240](https://github.com/ClickHouse/ClickHouse/pull/55240) ([Salvatore Mesoraca](https://github.com/aiven-sal)). +* Support rename table without keyword `TABLE` like `RENAME db.t1 to db.t2`. [#55373](https://github.com/ClickHouse/ClickHouse/pull/55373) ([凌涛](https://github.com/lingtaolf)). +* Add `internal_replication` to `system.clusters`. [#55377](https://github.com/ClickHouse/ClickHouse/pull/55377) ([Konstantin Morozov](https://github.com/k-morozov)). +* Select remote proxy resolver based on request protocol, add proxy feature docs and remove `DB::ProxyConfiguration::Protocol::ANY`. [#55430](https://github.com/ClickHouse/ClickHouse/pull/55430) ([Arthur Passos](https://github.com/arthurpassos)). +* Avoid retrying keeper operations on INSERT after table shutdown. [#55519](https://github.com/ClickHouse/ClickHouse/pull/55519) ([Azat Khuzhin](https://github.com/azat)). +* `SHOW COLUMNS` now correctly reports type `FixedString` as `BLOB` if setting `use_mysql_types_in_show_columns` is on. Also added two new settings, `mysql_map_string_to_text_in_show_columns` and `mysql_map_fixed_string_to_text_in_show_columns` to switch the output for types `String` and `FixedString` as `TEXT` or `BLOB`. [#55617](https://github.com/ClickHouse/ClickHouse/pull/55617) ([Serge Klochkov](https://github.com/slvrtrn)). +* During ReplicatedMergeTree tables startup clickhouse server checks set of parts for unexpected parts (exists locally, but not in zookeeper). All unexpected parts move to detached directory and instead of them server tries to restore some ancestor (covered) parts. Now server tries to restore closest ancestors instead of random covered parts. [#55645](https://github.com/ClickHouse/ClickHouse/pull/55645) ([alesapin](https://github.com/alesapin)). +* The advanced dashboard now supports draggable charts on touch devices. This closes [#54206](https://github.com/ClickHouse/ClickHouse/issues/54206). [#55649](https://github.com/ClickHouse/ClickHouse/pull/55649) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Use the default query format if declared when outputting exception with `http_write_exception_in_output_format`. [#55739](https://github.com/ClickHouse/ClickHouse/pull/55739) ([Raúl Marín](https://github.com/Algunenano)). +* Provide a better message for common MATERIALIZED VIEW pitfalls. [#55826](https://github.com/ClickHouse/ClickHouse/pull/55826) ([Raúl Marín](https://github.com/Algunenano)). 
+* If you dropped the current database, you will still be able to run some queries in `clickhouse-local` and switch to another database. This makes the behavior consistent with `clickhouse-client`. This closes [#55834](https://github.com/ClickHouse/ClickHouse/issues/55834). [#55853](https://github.com/ClickHouse/ClickHouse/pull/55853) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Functions `(add|subtract)(Year|Quarter|Month|Week|Day|Hour|Minute|Second|Millisecond|Microsecond|Nanosecond)` now support string-encoded date arguments, e.g. `SELECT addDays('2023-10-22', 1)`. This increases compatibility with MySQL and is needed by Tableau Online. [#55869](https://github.com/ClickHouse/ClickHouse/pull/55869) ([Robert Schulze](https://github.com/rschu1ze)). +* The setting `apply_deleted_mask` when disabled allows to read rows that where marked as deleted by lightweight DELETE queries. This is useful for debugging. [#55952](https://github.com/ClickHouse/ClickHouse/pull/55952) ([Alexander Gololobov](https://github.com/davenger)). +* Allow skipping `null` values when serailizing Tuple to json objects, which makes it possible to keep compatiability with Spark's `to_json` function, which is also useful for gluten. [#55956](https://github.com/ClickHouse/ClickHouse/pull/55956) ([李扬](https://github.com/taiyang-li)). +* Functions `(add|sub)Date()` now support string-encoded date arguments, e.g. `SELECT addDate('2023-10-22 11:12:13', INTERVAL 5 MINUTE)`. The same support for string-encoded date arguments is added to the plus and minus operators, e.g. `SELECT '2023-10-23' + INTERVAL 1 DAY`. This increases compatibility with MySQL and is needed by Tableau Online. [#55960](https://github.com/ClickHouse/ClickHouse/pull/55960) ([Robert Schulze](https://github.com/rschu1ze)). +* Allow unquoted strings with CR (`\r`) in CSV format. Closes [#39930](https://github.com/ClickHouse/ClickHouse/issues/39930). [#56046](https://github.com/ClickHouse/ClickHouse/pull/56046) ([Kruglov Pavel](https://github.com/Avogar)). +* Allow to run `clickhouse-keeper` using embedded config. [#56086](https://github.com/ClickHouse/ClickHouse/pull/56086) ([Maksim Kita](https://github.com/kitaisreal)). +* Set limit of the maximum configuration value for `queued.min.messages` to avoid problem with start fetching data with Kafka. [#56121](https://github.com/ClickHouse/ClickHouse/pull/56121) ([Stas Morozov](https://github.com/r3b-fish)). +* Fixed a typo in SQL function `minSampleSizeContinous` (renamed `minSampleSizeContinuous`). Old name is preserved for backward compatibility. This closes: [#56139](https://github.com/ClickHouse/ClickHouse/issues/56139). [#56143](https://github.com/ClickHouse/ClickHouse/pull/56143) ([Dorota Szeremeta](https://github.com/orotaday)). +* Print path for broken parts on disk before shutting down the server. Before this change if a part is corrupted on disk and server cannot start, it was almost impossible to understand which part is broken. This is fixed. [#56181](https://github.com/ClickHouse/ClickHouse/pull/56181) ([Duc Canh Le](https://github.com/canhld94)). + +#### Build/Testing/Packaging Improvement +* If the database in Docker is already initialized, it doesn't need to be initialized again upon subsequent launches. This can potentially fix the issue of infinite container restarts when the database fails to load within 1000 attempts (relevant for very large databases and multi-node setups). 
[#50724](https://github.com/ClickHouse/ClickHouse/pull/50724) ([Alexander Nikolaev](https://github.com/AlexNik)). +* Resource with source code including submodules is built in Darwin special build task. It may be used to build ClickHouse without checking out the submodules. [#51435](https://github.com/ClickHouse/ClickHouse/pull/51435) ([Ilya Yatsishin](https://github.com/qoega)). +* An error was occuring when building ClickHouse with the AVX series of instructions enabled globally (which isn't recommended). The reason is that snappy does not enable `SNAPPY_HAVE_X86_CRC32`. [#55049](https://github.com/ClickHouse/ClickHouse/pull/55049) ([monchickey](https://github.com/monchickey)). +* Solve issue with launching standalone `clickhouse-keeper` from `clickhouse-server` package. [#55226](https://github.com/ClickHouse/ClickHouse/pull/55226) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* In the tests, RabbitMQ version is updated to 3.12.6. Improved logs collection for RabbitMQ tests. [#55424](https://github.com/ClickHouse/ClickHouse/pull/55424) ([Ilya Yatsishin](https://github.com/qoega)). +* Modified the error message difference between openssl and boringssl to fix the functional test. [#55975](https://github.com/ClickHouse/ClickHouse/pull/55975) ([MeenaRenganathan22](https://github.com/MeenaRenganathan22)). +* Use upstream repo for apache datasketches. [#55787](https://github.com/ClickHouse/ClickHouse/pull/55787) ([Nikita Taranov](https://github.com/nickitat)). + +#### Bug Fix (user-visible misbehavior in an official stable release) +* Skip hardlinking inverted index files in mutation [#47663](https://github.com/ClickHouse/ClickHouse/pull/47663) ([cangyin](https://github.com/cangyin)). +* Fixed bug of `match` function (regex) with pattern containing alternation produces incorrect key condition. Closes #53222. [#54696](https://github.com/ClickHouse/ClickHouse/pull/54696) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). +* Fix 'Cannot find column' in read-in-order optimization with ARRAY JOIN [#51746](https://github.com/ClickHouse/ClickHouse/pull/51746) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Support missed experimental `Object(Nullable(json))` subcolumns in query. [#54052](https://github.com/ClickHouse/ClickHouse/pull/54052) ([zps](https://github.com/VanDarkholme7)). +* Re-add fix for `accurateCastOrNull()` [#54629](https://github.com/ClickHouse/ClickHouse/pull/54629) ([Salvatore Mesoraca](https://github.com/aiven-sal)). +* Fix detecting `DEFAULT` for columns of a Distributed table created without AS [#55060](https://github.com/ClickHouse/ClickHouse/pull/55060) ([Vitaly Baranov](https://github.com/vitlibar)). +* Proper cleanup in case of exception in ctor of ShellCommandSource [#55103](https://github.com/ClickHouse/ClickHouse/pull/55103) ([Alexander Gololobov](https://github.com/davenger)). +* Fix deadlock in LDAP assigned role update [#55119](https://github.com/ClickHouse/ClickHouse/pull/55119) ([Julian Maicher](https://github.com/jmaicher)). +* Suppress error statistics update for internal exceptions [#55128](https://github.com/ClickHouse/ClickHouse/pull/55128) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix deadlock in backups [#55132](https://github.com/ClickHouse/ClickHouse/pull/55132) ([alesapin](https://github.com/alesapin)). +* Fix storage Iceberg files retrieval [#55144](https://github.com/ClickHouse/ClickHouse/pull/55144) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Fix partition pruning of extra columns in set. 
[#55172](https://github.com/ClickHouse/ClickHouse/pull/55172) ([Amos Bird](https://github.com/amosbird)). +* Fix recalculation of skip indexes in ALTER UPDATE queries when table has adaptive granularity [#55202](https://github.com/ClickHouse/ClickHouse/pull/55202) ([Duc Canh Le](https://github.com/canhld94)). +* Fix for background download in fs cache [#55252](https://github.com/ClickHouse/ClickHouse/pull/55252) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Avoid possible memory leaks in compressors in case of missing buffer finalization [#55262](https://github.com/ClickHouse/ClickHouse/pull/55262) ([Azat Khuzhin](https://github.com/azat)). +* Fix functions execution over sparse columns [#55275](https://github.com/ClickHouse/ClickHouse/pull/55275) ([Azat Khuzhin](https://github.com/azat)). +* Fix incorrect merging of Nested for SELECT FINAL FROM SummingMergeTree [#55276](https://github.com/ClickHouse/ClickHouse/pull/55276) ([Azat Khuzhin](https://github.com/azat)). +* Fix bug with inability to drop detached partition in replicated merge tree on top of S3 without zero copy [#55309](https://github.com/ClickHouse/ClickHouse/pull/55309) ([alesapin](https://github.com/alesapin)). +* Fix a crash in MergeSortingPartialResultTransform (due to zero chunks after `remerge`) [#55335](https://github.com/ClickHouse/ClickHouse/pull/55335) ([Azat Khuzhin](https://github.com/azat)). +* Fix data-race in CreatingSetsTransform (on errors) due to throwing shared exception [#55338](https://github.com/ClickHouse/ClickHouse/pull/55338) ([Azat Khuzhin](https://github.com/azat)). +* Fix trash optimization (up to a certain extent) [#55353](https://github.com/ClickHouse/ClickHouse/pull/55353) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix leak in StorageHDFS [#55370](https://github.com/ClickHouse/ClickHouse/pull/55370) ([Azat Khuzhin](https://github.com/azat)). +* Fix parsing of arrays in cast operator [#55417](https://github.com/ClickHouse/ClickHouse/pull/55417) ([Anton Popov](https://github.com/CurtizJ)). +* Fix filtering by virtual columns with OR filter in query [#55418](https://github.com/ClickHouse/ClickHouse/pull/55418) ([Azat Khuzhin](https://github.com/azat)). +* Fix MongoDB connection issues [#55419](https://github.com/ClickHouse/ClickHouse/pull/55419) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix MySQL interface boolean representation [#55427](https://github.com/ClickHouse/ClickHouse/pull/55427) ([Serge Klochkov](https://github.com/slvrtrn)). +* Fix MySQL text protocol DateTime formatting and LowCardinality(Nullable(T)) types reporting [#55479](https://github.com/ClickHouse/ClickHouse/pull/55479) ([Serge Klochkov](https://github.com/slvrtrn)). +* Make `use_mysql_types_in_show_columns` affect only `SHOW COLUMNS` [#55481](https://github.com/ClickHouse/ClickHouse/pull/55481) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix stack symbolizer parsing `DW_FORM_ref_addr` incorrectly and sometimes crashing [#55483](https://github.com/ClickHouse/ClickHouse/pull/55483) ([Michael Kolupaev](https://github.com/al13n321)). +* Destroy fiber in case of exception in cancelBefore in AsyncTaskExecutor [#55516](https://github.com/ClickHouse/ClickHouse/pull/55516) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix Query Parameters not working with custom HTTP handlers [#55521](https://github.com/ClickHouse/ClickHouse/pull/55521) ([Konstantin Bogdanov](https://github.com/thevar1able)). 
+* Fix checking of non handled data for Values format [#55527](https://github.com/ClickHouse/ClickHouse/pull/55527) ([Azat Khuzhin](https://github.com/azat)). +* Fix 'Invalid cursor state' in odbc interacting with MS SQL Server [#55558](https://github.com/ClickHouse/ClickHouse/pull/55558) ([vdimir](https://github.com/vdimir)). +* Fix max execution time and 'break' overflow mode [#55577](https://github.com/ClickHouse/ClickHouse/pull/55577) ([Alexander Gololobov](https://github.com/davenger)). +* Fix crash in QueryNormalizer with cyclic aliases [#55602](https://github.com/ClickHouse/ClickHouse/pull/55602) ([vdimir](https://github.com/vdimir)). +* Disable wrong optimization and add a test [#55609](https://github.com/ClickHouse/ClickHouse/pull/55609) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Merging [#52352](https://github.com/ClickHouse/ClickHouse/issues/52352) [#55621](https://github.com/ClickHouse/ClickHouse/pull/55621) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Add a test to avoid incorrect decimal sorting [#55662](https://github.com/ClickHouse/ClickHouse/pull/55662) ([Amos Bird](https://github.com/amosbird)). +* Fix progress bar for s3 and azure Cluster functions with url without globs [#55666](https://github.com/ClickHouse/ClickHouse/pull/55666) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix filtering by virtual columns with OR filter in query (resubmit) [#55678](https://github.com/ClickHouse/ClickHouse/pull/55678) ([Azat Khuzhin](https://github.com/azat)). +* Fixes and improvements for Iceberg storage [#55695](https://github.com/ClickHouse/ClickHouse/pull/55695) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix data race in CreatingSetsTransform (v2) [#55786](https://github.com/ClickHouse/ClickHouse/pull/55786) ([Azat Khuzhin](https://github.com/azat)). +* Throw exception when parsing illegal string as float if precise_float_parsing is true [#55861](https://github.com/ClickHouse/ClickHouse/pull/55861) ([李扬](https://github.com/taiyang-li)). +* Disable predicate pushdown if the CTE contains stateful functions [#55871](https://github.com/ClickHouse/ClickHouse/pull/55871) ([Raúl Marín](https://github.com/Algunenano)). +* Fix normalize ASTSelectWithUnionQuery, as it was stripping `FORMAT` from the query [#55887](https://github.com/ClickHouse/ClickHouse/pull/55887) ([flynn](https://github.com/ucasfl)). +* Try to fix possible segfault in Native ORC input format [#55891](https://github.com/ClickHouse/ClickHouse/pull/55891) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix window functions in case of sparse columns. [#55895](https://github.com/ClickHouse/ClickHouse/pull/55895) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). +* fix: StorageNull supports subcolumns [#55912](https://github.com/ClickHouse/ClickHouse/pull/55912) ([FFish](https://github.com/wxybear)). +* Do not write retriable errors for Replicated mutate/merge into error log [#55944](https://github.com/ClickHouse/ClickHouse/pull/55944) ([Azat Khuzhin](https://github.com/azat)). +* Fix `SHOW DATABASES LIMIT ` [#55962](https://github.com/ClickHouse/ClickHouse/pull/55962) ([Raúl Marín](https://github.com/Algunenano)). +* Fix autogenerated Protobuf schema with fields with underscore [#55974](https://github.com/ClickHouse/ClickHouse/pull/55974) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix dateTime64ToSnowflake64() with non-default scale [#55983](https://github.com/ClickHouse/ClickHouse/pull/55983) ([Robert Schulze](https://github.com/rschu1ze)). 
+* Fix output/input of Arrow dictionary column [#55989](https://github.com/ClickHouse/ClickHouse/pull/55989) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix fetching schema from schema registry in AvroConfluent [#55991](https://github.com/ClickHouse/ClickHouse/pull/55991) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix 'Block structure mismatch' on concurrent ALTER and INSERTs in Buffer table [#55995](https://github.com/ClickHouse/ClickHouse/pull/55995) ([Michael Kolupaev](https://github.com/al13n321)). +* Fix incorrect free space accounting for least_used JBOD policy [#56030](https://github.com/ClickHouse/ClickHouse/pull/56030) ([Azat Khuzhin](https://github.com/azat)). +* Fix missing scalar issue when evaluating subqueries inside table functions [#56057](https://github.com/ClickHouse/ClickHouse/pull/56057) ([Amos Bird](https://github.com/amosbird)). +* Fix wrong query result when http_write_exception_in_output_format=1 [#56135](https://github.com/ClickHouse/ClickHouse/pull/56135) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix schema cache for fallback JSON->JSONEachRow with changed settings [#56172](https://github.com/ClickHouse/ClickHouse/pull/56172) ([Kruglov Pavel](https://github.com/Avogar)). +* Add error handler to odbc-bridge [#56185](https://github.com/ClickHouse/ClickHouse/pull/56185) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). + + ### ClickHouse release 23.9, 2023-09-28 #### Backward Incompatible Change diff --git a/CMakeLists.txt b/CMakeLists.txt index a5b94efefc5..063cfc77302 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -21,8 +21,11 @@ include (cmake/clang_tidy.cmake) include (cmake/git.cmake) include (cmake/utils.cmake) +# This is needed to set up the CMAKE_INSTALL_BINDIR variable. +include (GNUInstallDirs) + # Ignore export() since we don't use it, -# but it gets broken with a global targets via link_libraries() +# but it gets broken with global targets via link_libraries() macro (export) endmacro () @@ -164,7 +167,7 @@ if (OS_LINUX) # and whatever is poisoning it by LD_PRELOAD should not link to our symbols. # - The clickhouse-odbc-bridge and clickhouse-library-bridge binaries # should not expose their symbols to ODBC drivers and libraries. 
- set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-export-dynamic") + set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-export-dynamic -Wl,--gc-sections") endif () if (OS_DARWIN) @@ -187,9 +190,10 @@ if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE") endif () endif() -if (CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE" - OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" - OR CMAKE_BUILD_TYPE_UC STREQUAL "MINSIZEREL") +if (NOT (SANITIZE_COVERAGE OR WITH_COVERAGE) + AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE" + OR CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" + OR CMAKE_BUILD_TYPE_UC STREQUAL "MINSIZEREL")) set (OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT ON) else() set (OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT OFF) @@ -273,6 +277,11 @@ option (ENABLE_BUILD_PROFILING "Enable profiling of build time" OFF) if (ENABLE_BUILD_PROFILING) if (COMPILER_CLANG) set (COMPILER_FLAGS "${COMPILER_FLAGS} -ftime-trace") + + if (LINKER_NAME MATCHES "lld") + set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--time-trace") + set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -Wl,--time-trace") + endif () else () message (${RECONFIGURE_MESSAGE_LEVEL} "Build profiling is only available with CLang") endif () @@ -286,9 +295,6 @@ set (CMAKE_C_STANDARD 11) set (CMAKE_C_EXTENSIONS ON) # required by most contribs written in C set (CMAKE_C_STANDARD_REQUIRED ON) -# Compiler-specific coverage flags e.g. -fcoverage-mapping -option(WITH_COVERAGE "Profile the resulting binary/binaries" OFF) - if (COMPILER_CLANG) # Enable C++14 sized global deallocation functions. It should be enabled by setting -std=c++14 but I'm not sure. # See https://reviews.llvm.org/D112921 @@ -304,26 +310,20 @@ if (COMPILER_CLANG) set(BRANCHES_WITHIN_32B_BOUNDARIES "-mbranches-within-32B-boundaries") set(COMPILER_FLAGS "${COMPILER_FLAGS} ${BRANCHES_WITHIN_32B_BOUNDARIES}") endif() - - if (WITH_COVERAGE) - set(COMPILER_FLAGS "${COMPILER_FLAGS} -fprofile-instr-generate -fcoverage-mapping") - # If we want to disable coverage for specific translation units - set(WITHOUT_COVERAGE "-fno-profile-instr-generate -fno-coverage-mapping") - endif() endif () set (COMPILER_FLAGS "${COMPILER_FLAGS}") # Our built-in unwinder only supports DWARF version up to 4. -set (DEBUG_INFO_FLAGS "-g -gdwarf-4") +set (DEBUG_INFO_FLAGS "-g") # Disable omit frame pointer compiler optimization using -fno-omit-frame-pointer option(DISABLE_OMIT_FRAME_POINTER "Disable omit frame pointer compiler optimization" OFF) if (DISABLE_OMIT_FRAME_POINTER) - set (CMAKE_CXX_FLAGS_ADD "${CMAKE_CXX_FLAGS_ADD} -fno-omit-frame-pointer") - set (CMAKE_C_FLAGS_ADD "${CMAKE_C_FLAGS_ADD} -fno-omit-frame-pointer") - set (CMAKE_ASM_FLAGS_ADD "${CMAKE_ASM_FLAGS_ADD} -fno-omit-frame-pointer") + set (CMAKE_CXX_FLAGS_ADD "${CMAKE_CXX_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer") + set (CMAKE_C_FLAGS_ADD "${CMAKE_C_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer") + set (CMAKE_ASM_FLAGS_ADD "${CMAKE_ASM_FLAGS_ADD} -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer") endif() set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_ADD}") @@ -463,14 +463,6 @@ endif () message (STATUS "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE}") -include (GNUInstallDirs) - -# When testing for memory leaks with Valgrind, don't link tcmalloc or jemalloc. 
- -if (TARGET global-group) - install (EXPORT global DESTINATION cmake) -endif () - add_subdirectory (contrib EXCLUDE_FROM_ALL) if (NOT ENABLE_JEMALLOC) @@ -554,10 +546,16 @@ if (ENABLE_RUST) endif() endif() +if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND NOT SANITIZE AND OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64)) + set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON) +else () + set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF) +endif () +option(CHECK_LARGE_OBJECT_SIZES "Check that there are no large object files after build." ${CHECK_LARGE_OBJECT_SIZES_DEFAULT}) + add_subdirectory (base) add_subdirectory (src) add_subdirectory (programs) -add_subdirectory (tests) add_subdirectory (utils) if (FUZZER) diff --git a/README.md b/README.md index 67d4f46988f..d0fd19c0b73 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,17 @@ -[ClickHouse — open source distributed column-oriented DBMS](https://clickhouse.com?utm_source=github) +
-ClickHouse® is an open-source column-oriented database management system that allows generating analytical data reports in real-time.
+[![Website](https://img.shields.io/website?up_message=AVAILABLE&down_message=DOWN&url=https%3A%2F%2Fclickhouse.com&style=for-the-badge)](https://clickhouse.com)
+[![Apache 2.0 License](https://img.shields.io/badge/license-Apache%202.0-blueviolet?style=for-the-badge)](https://www.apache.org/licenses/LICENSE-2.0)
+
+<img alt="The ClickHouse company logo." src="…">
+
+ClickHouse® is an open-source column-oriented database management system that allows generating analytical data reports in real-time.
+
## How To Install (Linux, macOS, FreeBSD) ``` @@ -22,10 +33,7 @@ curl https://clickhouse.com/ | sh ## Upcoming Events -* [**v23.9 Community Call**]([https://clickhouse.com/company/events/v23-8-community-release-call](https://clickhouse.com/company/events/v23-9-community-release-call)?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-08) - Sep 28 - 23.9 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release. -* [**ClickHouse Meetup in Amsterdam**](https://www.meetup.com/clickhouse-netherlands-user-group/events/296334590/) - Oct 31 -* [**ClickHouse Meetup in Beijing**](https://www.meetup.com/clickhouse-beijing-user-group/events/296334856/) - Nov 4 -* [**ClickHouse Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/296334923/) - Nov 8 +* [**ClickHouse Meetup in San Francisco**](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/296334923/) - Nov 14 * [**ClickHouse Meetup in Singapore**](https://www.meetup.com/clickhouse-singapore-meetup-group/events/296334976/) - Nov 15 * [**ClickHouse Meetup in Berlin**](https://www.meetup.com/clickhouse-berlin-user-group/events/296488501/) - Nov 30 * [**ClickHouse Meetup in NYC**](https://www.meetup.com/clickhouse-new-york-user-group/events/296488779/) - Dec 11 @@ -35,7 +43,7 @@ Also, keep an eye out for upcoming meetups around the world. Somewhere else you ## Recent Recordings * **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Current featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments" -* **Recording available**: [**v23.6 Release Webinar**](https://www.youtube.com/watch?v=cuf_hYn7dqU) All the features of 23.6, one convenient video! Watch it now! +* **Recording available**: [**v23.10 Release Webinar**](https://www.youtube.com/watch?v=PGQS6uPb970) All the features of 23.10, one convenient video! Watch it now! * **All release webinar recordings**: [YouTube playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3jAlSy1JxyP8zluvXaN3nxU) @@ -45,4 +53,4 @@ We are a globally diverse and distributed team, united behind a common goal of c Check out our **current openings** here: https://clickhouse.com/company/careers -Cant find what you are looking for, but want to let us know you are interested in joining ClickHouse? Email careers@clickhouse.com! +Can't find what you are looking for, but want to let us know you are interested in joining ClickHouse? Email careers@clickhouse.com! 
diff --git a/SECURITY.md b/SECURITY.md index 82b7254f8c1..5477628cee4 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -13,9 +13,10 @@ The following versions of ClickHouse server are currently being supported with s | Version | Supported | |:-|:-| +| 23.10 | ✔️ | | 23.9 | ✔️ | | 23.8 | ✔️ | -| 23.7 | ✔️ | +| 23.7 | ❌ | | 23.6 | ❌ | | 23.5 | ❌ | | 23.4 | ❌ | diff --git a/base/base/CMakeLists.txt b/base/base/CMakeLists.txt index 8ab3c8a0711..3886932d198 100644 --- a/base/base/CMakeLists.txt +++ b/base/base/CMakeLists.txt @@ -1,3 +1,5 @@ +add_compile_options($<$,$>:${COVERAGE_FLAGS}>) + if (USE_CLANG_TIDY) set (CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_PATH}") endif () @@ -45,6 +47,10 @@ else () target_compile_definitions(common PUBLIC WITH_COVERAGE=0) endif () +if (TARGET ch_contrib::crc32_s390x) + target_link_libraries(common PUBLIC ch_contrib::crc32_s390x) +endif() + target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..") target_link_libraries (common diff --git a/base/base/StringRef.h b/base/base/StringRef.h index 6456706fafe..150cd81e33c 100644 --- a/base/base/StringRef.h +++ b/base/base/StringRef.h @@ -35,6 +35,10 @@ #pragma clang diagnostic ignored "-Wreserved-identifier" #endif +#if defined(__s390x__) + #include + #define CRC_INT s390x_crc32c +#endif /** * The std::string_view-like container to avoid creating strings to find substrings in the hash table. @@ -264,8 +268,8 @@ inline size_t hashLessThan8(const char * data, size_t size) if (size >= 4) { - UInt64 a = unalignedLoad(data); - return hashLen16(size + (a << 3), unalignedLoad(data + size - 4)); + UInt64 a = unalignedLoadLittleEndian(data); + return hashLen16(size + (a << 3), unalignedLoadLittleEndian(data + size - 4)); } if (size > 0) @@ -285,8 +289,8 @@ inline size_t hashLessThan16(const char * data, size_t size) { if (size > 8) { - UInt64 a = unalignedLoad(data); - UInt64 b = unalignedLoad(data + size - 8); + UInt64 a = unalignedLoadLittleEndian(data); + UInt64 b = unalignedLoadLittleEndian(data + size - 8); return hashLen16(a, rotateByAtLeast1(b + size, static_cast(size))) ^ b; } @@ -315,13 +319,13 @@ struct CRC32Hash do { - UInt64 word = unalignedLoad(pos); + UInt64 word = unalignedLoadLittleEndian(pos); res = static_cast(CRC_INT(res, word)); pos += 8; } while (pos + 8 < end); - UInt64 word = unalignedLoad(end - 8); /// I'm not sure if this is normal. + UInt64 word = unalignedLoadLittleEndian(end - 8); /// I'm not sure if this is normal. res = static_cast(CRC_INT(res, word)); return res; diff --git a/base/base/coverage.cpp b/base/base/coverage.cpp index 1027638be3d..d70c3bcd82b 100644 --- a/base/base/coverage.cpp +++ b/base/base/coverage.cpp @@ -1,11 +1,15 @@ #include "coverage.h" -#if WITH_COVERAGE - #pragma GCC diagnostic ignored "-Wreserved-identifier" -# include -# include + +/// WITH_COVERAGE enables the default implementation of code coverage, +/// that dumps a map to the filesystem. + +#if WITH_COVERAGE + +#include +#include # if defined(__clang__) @@ -31,3 +35,131 @@ void dumpCoverageReportIfPossible() #endif } + + +/// SANITIZE_COVERAGE enables code instrumentation, +/// but leaves the callbacks implementation to us, +/// which we use to calculate coverage on a per-test basis +/// and to write it to system tables. 
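The comment block above is the core idea of SANITIZE_COVERAGE: clang instruments every edge and calls callbacks that the program itself provides, and the implementation that follows records the hit program counters so they can later be queried per test. As a rough, self-contained illustration of that callback contract (a hedged sketch, not ClickHouse code; the file name, guard-id scheme, and output format are made up), the same two hooks can be implemented in a standalone program built with a recent clang via `clang++ -fsanitize-coverage=trace-pc-guard coverage_demo.cpp`:

```
// coverage_demo.cpp: hypothetical standalone demo of the trace-pc-guard hooks.
#include <cstdint>
#include <cstdio>

extern "C"
{

/// Called once per instrumented module; assign each edge guard a non-zero id.
__attribute__((no_sanitize("coverage")))
void __sanitizer_cov_trace_pc_guard_init(uint32_t * start, uint32_t * stop)
{
    static uint32_t next_id = 0;
    if (start == stop || *start)
        return; /// this module is already initialized
    for (uint32_t * g = start; g < stop; ++g)
        *g = ++next_id;
}

/// Called on every edge whose guard is still non-zero.
__attribute__((no_sanitize("coverage")))
void __sanitizer_cov_trace_pc_guard(uint32_t * guard)
{
    if (!*guard)
        return; /// edge already reported (or disabled)
    uint32_t id = *guard;
    *guard = 0; /// zero the guard so each edge is reported only once
    std::printf("edge %u hit at %p\n", static_cast<unsigned>(id), __builtin_return_address(0));
}

}

int main(int argc, char **)
{
    if (argc > 1)
        std::puts("branch A"); /// different edges fire depending on argc
    else
        std::puts("branch B");
}
```

Marking the callbacks `no_sanitize("coverage")` keeps them out of their own instrumentation, the same attribute the patch applies to the custom `memcpy`. In the patch itself the per-edge program counters go into `coverage_array`, and the `getCoverage()`, `getAllInstrumentedAddresses()` and `resetCoverage()` functions declared in `base/base/coverage.h` further down in this diff expose and reset that state between tests.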
+ +#if defined(SANITIZE_COVERAGE) + +namespace +{ + bool pc_guards_initialized = false; + bool pc_table_initialized = false; + + uint32_t * guards_start = nullptr; + uint32_t * guards_end = nullptr; + + uintptr_t * coverage_array = nullptr; + size_t coverage_array_size = 0; + + uintptr_t * all_addresses_array = nullptr; + size_t all_addresses_array_size = 0; +} + +extern "C" +{ + +/// This is called at least once for every DSO for initialization. +/// But we will use it only for the main DSO. +void __sanitizer_cov_trace_pc_guard_init(uint32_t * start, uint32_t * stop) +{ + if (pc_guards_initialized) + return; + pc_guards_initialized = true; + + /// The function can be called multiple times, but we need to initialize only once. + if (start == stop || *start) + return; + + guards_start = start; + guards_end = stop; + coverage_array_size = stop - start; + + /// Note: we will leak this. + coverage_array = static_cast(malloc(sizeof(uintptr_t) * coverage_array_size)); + + resetCoverage(); +} + +/// This is called at least once for every DSO for initialization +/// and provides information about all instrumented addresses. +void __sanitizer_cov_pcs_init(const uintptr_t * pcs_begin, const uintptr_t * pcs_end) +{ + if (pc_table_initialized) + return; + pc_table_initialized = true; + + all_addresses_array = static_cast(malloc(sizeof(uintptr_t) * coverage_array_size)); + all_addresses_array_size = pcs_end - pcs_begin; + + /// They are not a real pointers, but also contain a flag in the most significant bit, + /// in which we are not interested for now. Reset it. + for (size_t i = 0; i < all_addresses_array_size; ++i) + all_addresses_array[i] = pcs_begin[i] & 0x7FFFFFFFFFFFFFFFULL; +} + +/// This is called at every basic block / edge, etc. +void __sanitizer_cov_trace_pc_guard(uint32_t * guard) +{ + /// Duplicate the guard check. + if (!*guard) + return; + *guard = 0; + + /// If you set *guard to 0 this code will not be called again for this edge. + /// Now we can get the PC and do whatever you want: + /// - store it somewhere or symbolize it and print right away. + /// The values of `*guard` are as you set them in + /// __sanitizer_cov_trace_pc_guard_init and so you can make them consecutive + /// and use them to dereference an array or a bit vector. + void * pc = __builtin_return_address(0); + + coverage_array[guard - guards_start] = reinterpret_cast(pc); +} + +} + +__attribute__((no_sanitize("coverage"))) +std::span getCoverage() +{ + return {coverage_array, coverage_array_size}; +} + +__attribute__((no_sanitize("coverage"))) +std::span getAllInstrumentedAddresses() +{ + return {all_addresses_array, all_addresses_array_size}; +} + +__attribute__((no_sanitize("coverage"))) +void resetCoverage() +{ + memset(coverage_array, 0, coverage_array_size * sizeof(*coverage_array)); + + /// The guard defines whether the __sanitizer_cov_trace_pc_guard should be called. + /// For example, you can unset it after first invocation to prevent excessive work. + /// Initially set all the guards to 1 to enable callbacks. 
+ for (uint32_t * x = guards_start; x < guards_end; ++x) + *x = 1; +} + +#else + +std::span getCoverage() +{ + return {}; +} + +std::span getAllInstrumentedAddresses() +{ + return {}; +} + +void resetCoverage() +{ +} + +#endif diff --git a/base/base/coverage.h b/base/base/coverage.h index 4a57528b0ce..f75ed2d3553 100644 --- a/base/base/coverage.h +++ b/base/base/coverage.h @@ -1,5 +1,8 @@ #pragma once +#include +#include + /// Flush coverage report to file, depending on coverage system /// proposed by compiler (llvm for clang and gcov for gcc). /// @@ -7,3 +10,16 @@ /// Thread safe (use exclusive lock). /// Idempotent, may be called multiple times. void dumpCoverageReportIfPossible(); + +/// This is effective if SANITIZE_COVERAGE is enabled at build time. +/// Get accumulated unique program addresses of the instrumented parts of the code, +/// seen so far after program startup or after previous reset. +/// The returned span will be represented as a sparse map, containing mostly zeros, which you should filter away. +std::span getCoverage(); + +/// Get all instrumented addresses that could be in the coverage. +std::span getAllInstrumentedAddresses(); + +/// Reset the accumulated coverage. +/// This is useful to compare coverage of different tests, including differential coverage. +void resetCoverage(); diff --git a/base/base/crc32c_s390x.h b/base/base/crc32c_s390x.h new file mode 100644 index 00000000000..cbd285041d1 --- /dev/null +++ b/base/base/crc32c_s390x.h @@ -0,0 +1,26 @@ +#pragma once + +#include + +inline uint32_t s390x_crc32c_u8(uint32_t crc, uint8_t v) +{ + return crc32c_le_vx(crc, reinterpret_cast(&v), sizeof(v)); +} + +inline uint32_t s390x_crc32c_u16(uint32_t crc, uint16_t v) +{ + v = __builtin_bswap16(v); + return crc32c_le_vx(crc, reinterpret_cast(&v), sizeof(v)); +} + +inline uint32_t s390x_crc32c_u32(uint32_t crc, uint32_t v) +{ + v = __builtin_bswap32(v); + return crc32c_le_vx(crc, reinterpret_cast(&v), sizeof(v)); +} + +inline uint64_t s390x_crc32c(uint64_t crc, uint64_t v) +{ + v = __builtin_bswap64(v); + return crc32c_le_vx(static_cast(crc), reinterpret_cast(&v), sizeof(uint64_t)); +} diff --git a/base/base/defines.h b/base/base/defines.h index d852f6b9f63..02058a29096 100644 --- a/base/base/defines.h +++ b/base/base/defines.h @@ -119,17 +119,16 @@ #include namespace DB { - void abortOnFailedAssertion(const String & description); + [[noreturn]] void abortOnFailedAssertion(const String & description); } - #define chassert(x) static_cast(x) ? void(0) : ::DB::abortOnFailedAssertion(#x) + #define chassert(x) do { static_cast(x) ? void(0) : ::DB::abortOnFailedAssertion(#x); } while (0) #define UNREACHABLE() abort() // clang-format off #else /// Here sizeof() trick is used to suppress unused warning for result, /// since simple "(void)x" will evaluate the expression, while /// "sizeof(!(x))" will not. 
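The sizeof trick described in the comment above is what lets release builds drop `chassert` entirely while still type-checking its argument, so variables used only inside assertions do not trigger unused warnings. A small, hypothetical demonstration with simplified macro names (not the actual defines.h, which aborts via `DB::abortOnFailedAssertion`):

```
#include <cstdio>
#include <cstdlib>

/// Debug flavour: evaluate the condition and abort on failure.
#define CHASSERT_DEBUG(x) do { static_cast<bool>(x) ? void(0) : std::abort(); } while (0)

/// Release flavour: sizeof does not evaluate its operand, so the expression is
/// type-checked (and its variables count as "used") but never executed.
#define CHASSERT_RELEASE(x) (void)sizeof(!(x))

static int calls = 0;
static bool noisy_check() { ++calls; return true; }

int main()
{
    CHASSERT_DEBUG(noisy_check());   /// evaluated: calls becomes 1
    CHASSERT_RELEASE(noisy_check()); /// not evaluated: calls stays 1
    std::printf("calls = %d\n", calls);
}
```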
- #define NIL_EXPRESSION(x) (void)sizeof(!(x)) - #define chassert(x) NIL_EXPRESSION(x) + #define chassert(x) (void)sizeof(!(x)) #define UNREACHABLE() __builtin_unreachable() #endif #endif diff --git a/base/base/wide_integer_impl.h b/base/base/wide_integer_impl.h index fc4e9e551ca..c1fd7b69b7f 100644 --- a/base/base/wide_integer_impl.h +++ b/base/base/wide_integer_impl.h @@ -65,7 +65,7 @@ class IsTupleLike static void check(...); public: - static constexpr const bool value = !std::is_void(nullptr))>::value; + static constexpr const bool value = !std::is_void_v(nullptr))>; }; } @@ -79,7 +79,7 @@ class numeric_limits> { public: static constexpr bool is_specialized = true; - static constexpr bool is_signed = is_same::value; + static constexpr bool is_signed = is_same_v; static constexpr bool is_integer = true; static constexpr bool is_exact = true; static constexpr bool has_infinity = false; @@ -91,7 +91,7 @@ public: static constexpr bool is_iec559 = false; static constexpr bool is_bounded = true; static constexpr bool is_modulo = true; - static constexpr int digits = Bits - (is_same::value ? 1 : 0); + static constexpr int digits = Bits - (is_same_v ? 1 : 0); static constexpr int digits10 = digits * 0.30103 /*std::log10(2)*/; static constexpr int max_digits10 = 0; static constexpr int radix = 2; @@ -104,7 +104,7 @@ public: static constexpr wide::integer min() noexcept { - if (is_same::value) + if constexpr (is_same_v) { using T = wide::integer; T res{}; @@ -118,7 +118,7 @@ public: { using T = wide::integer; T res{}; - res.items[T::_impl::big(0)] = is_same::value + res.items[T::_impl::big(0)] = is_same_v ? std::numeric_limits::signed_base_type>::max() : std::numeric_limits::base_type>::max(); for (unsigned i = 1; i < wide::integer::_impl::item_count; ++i) diff --git a/base/glibc-compatibility/CMakeLists.txt b/base/glibc-compatibility/CMakeLists.txt index 0539f0c231d..c967fa5b11b 100644 --- a/base/glibc-compatibility/CMakeLists.txt +++ b/base/glibc-compatibility/CMakeLists.txt @@ -5,9 +5,6 @@ if (GLIBC_COMPATIBILITY) endif() enable_language(ASM) - include(CheckIncludeFile) - - check_include_file("sys/random.h" HAVE_SYS_RANDOM_H) add_headers_and_sources(glibc_compatibility .) 
add_headers_and_sources(glibc_compatibility musl) @@ -21,11 +18,6 @@ if (GLIBC_COMPATIBILITY) message (FATAL_ERROR "glibc_compatibility can only be used on x86_64 or aarch64.") endif () - list(REMOVE_ITEM glibc_compatibility_sources musl/getentropy.c) - if(HAVE_SYS_RANDOM_H) - list(APPEND glibc_compatibility_sources musl/getentropy.c) - endif() - # Need to omit frame pointers to match the performance of glibc set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer") @@ -43,12 +35,6 @@ if (GLIBC_COMPATIBILITY) target_link_libraries(global-libs INTERFACE glibc-compatibility ${MEMCPY_LIBRARY}) - install( - TARGETS glibc-compatibility ${MEMCPY_LIBRARY} - EXPORT global - ARCHIVE DESTINATION lib - ) - message (STATUS "Some symbols from glibc will be replaced for compatibility") elseif (CLICKHOUSE_OFFICIAL_BUILD) diff --git a/base/glibc-compatibility/memcpy/memcpy.cpp b/base/glibc-compatibility/memcpy/memcpy.cpp index ec43a2c3649..8bab35934d3 100644 --- a/base/glibc-compatibility/memcpy/memcpy.cpp +++ b/base/glibc-compatibility/memcpy/memcpy.cpp @@ -1,5 +1,6 @@ #include "memcpy.h" +__attribute__((no_sanitize("coverage"))) extern "C" void * memcpy(void * __restrict dst, const void * __restrict src, size_t size) { return inline_memcpy(dst, src, size); diff --git a/base/glibc-compatibility/memcpy/memcpy.h b/base/glibc-compatibility/memcpy/memcpy.h index 0930dfb5c67..86439dda061 100644 --- a/base/glibc-compatibility/memcpy/memcpy.h +++ b/base/glibc-compatibility/memcpy/memcpy.h @@ -93,7 +93,7 @@ * See https://habr.com/en/company/yandex/blog/457612/ */ - +__attribute__((no_sanitize("coverage"))) static inline void * inline_memcpy(void * __restrict dst_, const void * __restrict src_, size_t size) { /// We will use pointer arithmetic, so char pointer will be used. diff --git a/base/harmful/CMakeLists.txt b/base/harmful/CMakeLists.txt index 399f6ecc625..c19661875be 100644 --- a/base/harmful/CMakeLists.txt +++ b/base/harmful/CMakeLists.txt @@ -1,2 +1 @@ add_library(harmful harmful.c) -install(TARGETS harmful EXPORT global ARCHIVE DESTINATION lib) diff --git a/base/poco/Net/include/Poco/Net/HTTPClientSession.h b/base/poco/Net/include/Poco/Net/HTTPClientSession.h index 167a06eb7ff..7c0caa1c18b 100644 --- a/base/poco/Net/include/Poco/Net/HTTPClientSession.h +++ b/base/poco/Net/include/Poco/Net/HTTPClientSession.h @@ -68,7 +68,7 @@ namespace Net struct ProxyConfig /// HTTP proxy server configuration. { - ProxyConfig() : port(HTTP_PORT), protocol("http"), tunnel(true) { } + ProxyConfig() : port(HTTP_PORT), protocol("http"), tunnel(true), originalRequestProtocol("http") { } std::string host; /// Proxy server host name or IP address. @@ -87,6 +87,9 @@ namespace Net /// A regular expression defining hosts for which the proxy should be bypassed, /// e.g. "localhost|127\.0\.0\.1|192\.168\.0\.\d+". Can also be an empty /// string to disable proxy bypassing. + std::string originalRequestProtocol; + /// Original request protocol (http or https). + /// Required in the case of: HTTPS request over HTTP proxy with tunneling (CONNECT) off. 
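The new `originalRequestProtocol` field covers exactly the case named in the comment above: an HTTPS request sent through a plain-HTTP proxy with the CONNECT tunnel disabled, where the absolute request URI handed to the proxy must keep the original scheme (see the `proxyRequestPrefix()` change in the HTTPClientSession.cpp hunk below). A hedged usage sketch, assuming the usual `HTTPClientSession::setProxyConfig` setter and with a made-up proxy host and port:

```
#include <Poco/Net/HTTPClientSession.h>

/// Sketch: reach an https:// endpoint through an HTTP proxy without CONNECT.
void configureProxiedSession(Poco::Net::HTTPClientSession & session)
{
    Poco::Net::HTTPClientSession::ProxyConfig proxy;
    proxy.host = "proxy.example.com";        /// hypothetical proxy endpoint
    proxy.port = 3128;
    proxy.protocol = "http";                 /// the proxy itself speaks plain HTTP
    proxy.tunnel = false;                    /// tunneling (CONNECT) is off
    proxy.originalRequestProtocol = "https"; /// scheme of the request being proxied
    session.setProxyConfig(proxy);
    /// proxyRequestPrefix() now yields "https://<host>" instead of the previously
    /// hard-coded "http://", so the proxied request keeps its scheme.
}
```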
}; HTTPClientSession(); diff --git a/base/poco/Net/src/HTTPClientSession.cpp b/base/poco/Net/src/HTTPClientSession.cpp index 2712c0c452e..2282cca682b 100644 --- a/base/poco/Net/src/HTTPClientSession.cpp +++ b/base/poco/Net/src/HTTPClientSession.cpp @@ -418,7 +418,7 @@ void HTTPClientSession::reconnect() std::string HTTPClientSession::proxyRequestPrefix() const { - std::string result("http://"); + std::string result(_proxyConfig.originalRequestProtocol + "://"); result.append(_host); /// Do not append default by default, since this may break some servers. /// One example of such server is GCS (Google Cloud Storage). diff --git a/base/poco/Net/src/HTTPServerSession.cpp b/base/poco/Net/src/HTTPServerSession.cpp index f6d3c4e5b92..d4f2b24879e 100644 --- a/base/poco/Net/src/HTTPServerSession.cpp +++ b/base/poco/Net/src/HTTPServerSession.cpp @@ -26,7 +26,6 @@ HTTPServerSession::HTTPServerSession(const StreamSocket& socket, HTTPServerParam _maxKeepAliveRequests(pParams->getMaxKeepAliveRequests()) { setTimeout(pParams->getTimeout()); - this->socket().setReceiveTimeout(pParams->getTimeout()); } diff --git a/base/poco/Net/src/HTTPSession.cpp b/base/poco/Net/src/HTTPSession.cpp index d2663baaf9f..8f951b3102c 100644 --- a/base/poco/Net/src/HTTPSession.cpp +++ b/base/poco/Net/src/HTTPSession.cpp @@ -93,9 +93,34 @@ void HTTPSession::setTimeout(const Poco::Timespan& timeout) void HTTPSession::setTimeout(const Poco::Timespan& connectionTimeout, const Poco::Timespan& sendTimeout, const Poco::Timespan& receiveTimeout) { - _connectionTimeout = connectionTimeout; - _sendTimeout = sendTimeout; - _receiveTimeout = receiveTimeout; + try + { + _connectionTimeout = connectionTimeout; + + if (_sendTimeout.totalMicroseconds() != sendTimeout.totalMicroseconds()) { + _sendTimeout = sendTimeout; + + if (connected()) + _socket.setSendTimeout(_sendTimeout); + } + + if (_receiveTimeout.totalMicroseconds() != receiveTimeout.totalMicroseconds()) { + _receiveTimeout = receiveTimeout; + + if (connected()) + _socket.setReceiveTimeout(_receiveTimeout); + } + } + catch (NetException &) + { +#ifndef NDEBUG + throw; +#else + // mute exceptions in release + // just in case when changing settings on socket is not allowed + // however it should be OK for timeouts +#endif + } } diff --git a/cmake/add_check.cmake b/cmake/add_check.cmake deleted file mode 100644 index ba30ee8676f..00000000000 --- a/cmake/add_check.cmake +++ /dev/null @@ -1,19 +0,0 @@ -# Adding test output on failure -enable_testing () - -if (NOT TARGET check) - if (CMAKE_CONFIGURATION_TYPES) - add_custom_target (check COMMAND ${CMAKE_CTEST_COMMAND} - --force-new-ctest-process --output-on-failure --build-config "$" - WORKING_DIRECTORY ${PROJECT_BINARY_DIR}) - else () - add_custom_target (check COMMAND ${CMAKE_CTEST_COMMAND} - --force-new-ctest-process --output-on-failure - WORKING_DIRECTORY ${PROJECT_BINARY_DIR}) - endif () -endif () - -macro (add_check target) - add_test (NAME test_${target} COMMAND ${target} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) - add_dependencies (check ${target}) -endmacro (add_check) diff --git a/cmake/autogenerated_versions.txt b/cmake/autogenerated_versions.txt index 2f6d43d6cd0..1e0a82a1403 100644 --- a/cmake/autogenerated_versions.txt +++ b/cmake/autogenerated_versions.txt @@ -2,11 +2,11 @@ # NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION, # only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes. 
-SET(VERSION_REVISION 54479) +SET(VERSION_REVISION 54480) SET(VERSION_MAJOR 23) -SET(VERSION_MINOR 10) +SET(VERSION_MINOR 11) SET(VERSION_PATCH 1) -SET(VERSION_GITHASH 8f9a227de1f530cdbda52c145d41a6b0f1d29961) -SET(VERSION_DESCRIBE v23.10.1.1-testing) -SET(VERSION_STRING 23.10.1.1) +SET(VERSION_GITHASH 13adae0e42fd48de600486fc5d4b64d39f80c43e) +SET(VERSION_DESCRIBE v23.11.1.1-testing) +SET(VERSION_STRING 23.11.1.1) # end of autochange diff --git a/cmake/ccache.cmake b/cmake/ccache.cmake index e8bf856332a..0df70d82d2c 100644 --- a/cmake/ccache.cmake +++ b/cmake/ccache.cmake @@ -9,10 +9,10 @@ if (CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache" OR CMAKE_C_COMPILER_LAUNCHER MA return() endif() -set(COMPILER_CACHE "auto" CACHE STRING "Speedup re-compilations using the caching tools; valid options are 'auto' (ccache, then sccache), 'ccache', 'sccache', or 'disabled'") +set(COMPILER_CACHE "auto" CACHE STRING "Speedup re-compilations using the caching tools; valid options are 'auto' (sccache, then ccache), 'ccache', 'sccache', or 'disabled'") if(COMPILER_CACHE STREQUAL "auto") - find_program (CCACHE_EXECUTABLE NAMES ccache sccache) + find_program (CCACHE_EXECUTABLE NAMES sccache ccache) elseif (COMPILER_CACHE STREQUAL "ccache") find_program (CCACHE_EXECUTABLE ccache) elseif(COMPILER_CACHE STREQUAL "sccache") @@ -21,7 +21,7 @@ elseif(COMPILER_CACHE STREQUAL "disabled") message(STATUS "Using *ccache: no (disabled via configuration)") return() else() - message(${RECONFIGURE_MESSAGE_LEVEL} "The COMPILER_CACHE must be one of (auto|ccache|sccache|disabled), value: '${COMPILER_CACHE}'") + message(${RECONFIGURE_MESSAGE_LEVEL} "The COMPILER_CACHE must be one of (auto|sccache|ccache|disabled), value: '${COMPILER_CACHE}'") endif() diff --git a/cmake/cpu_features.cmake b/cmake/cpu_features.cmake index 9fc3960c166..cfa9c314bc0 100644 --- a/cmake/cpu_features.cmake +++ b/cmake/cpu_features.cmake @@ -1,10 +1,5 @@ # https://software.intel.com/sites/landingpage/IntrinsicsGuide/ -include (CheckCXXSourceCompiles) -include (CMakePushCheckState) - -cmake_push_check_state () - # The variables HAVE_* determine if compiler has support for the flag to use the corresponding instruction set. # The options ENABLE_* determine if we will tell compiler to actually use the corresponding instruction set if compiler can do it. @@ -137,189 +132,54 @@ elseif (ARCH_AMD64) endif() # ClickHouse can be cross-compiled (e.g. on an ARM host for x86) but it is also possible to build ClickHouse on x86 w/o AVX for x86 w/ - # AVX. We only check that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary. - # Therefore, use check_cxx_source_compiles (= does the code compile+link?) instead of check_cxx_source_runs (= does the code - # compile+link+run). + # AVX. We only assume that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary. 
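The comment above is the rationale for deleting the `check_cxx_source_compiles` probes that follow: passing a flag such as `-msse4.2` only requires the compiler to accept the corresponding intrinsics, regardless of whether the machine doing the build can execute them. For context, this is roughly the kind of snippet those probes used to test-compile, shown here as a minimal, hedged standalone example (not part of the build system):

```
// Built with e.g. `clang++ -msse4.2 crc_demo.cpp`. It compiles on any x86-64
// toolchain that accepts the flag, even if the compiling host lacks SSE4.2;
// it would only fail at run time on such a host.
#include <cstdint>
#include <cstdio>
#include <nmmintrin.h> // SSE4.2 intrinsics, including _mm_crc32_u64

int main()
{
    std::uint64_t crc = 0;
    crc = _mm_crc32_u64(crc, 0x1234567890abcdefULL); // emits the crc32 instruction
    std::printf("%llx\n", static_cast<unsigned long long>(crc));
}
```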
- set (TEST_FLAG "-mssse3") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - __m64 a = _mm_abs_pi8(__m64()); - (void)a; - return 0; - } - " HAVE_SSSE3) - if (HAVE_SSSE3 AND ENABLE_SSSE3) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + if (ENABLE_SSSE3) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mssse3") endif () - set (TEST_FLAG "-msse4.1") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm_insert_epi8(__m128i(), 0, 0); - (void)a; - return 0; - } - " HAVE_SSE41) - if (HAVE_SSE41 AND ENABLE_SSE41) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + if (ENABLE_SSE41) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -msse4.1") endif () - set (TEST_FLAG "-msse4.2") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm_crc32_u64(0, 0); - (void)a; - return 0; - } - " HAVE_SSE42) - if (HAVE_SSE42 AND ENABLE_SSE42) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + if (ENABLE_SSE42) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -msse4.2") endif () - set (TEST_FLAG "-mpclmul") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm_clmulepi64_si128(__m128i(), __m128i(), 0); - (void)a; - return 0; - } - " HAVE_PCLMULQDQ) - if (HAVE_PCLMULQDQ AND ENABLE_PCLMULQDQ) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + if (ENABLE_PCLMULQDQ) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mpclmul") endif () - set (TEST_FLAG "-mpopcnt") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - int main() { - auto a = __builtin_popcountll(0); - (void)a; - return 0; - } - " HAVE_POPCNT) - if (HAVE_POPCNT AND ENABLE_POPCNT) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + if (ENABLE_BMI) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mbmi") endif () - set (TEST_FLAG "-mavx") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm256_insert_epi8(__m256i(), 0, 0); - (void)a; - return 0; - } - " HAVE_AVX) - if (HAVE_AVX AND ENABLE_AVX) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + if (ENABLE_POPCNT) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mpopcnt") endif () - set (TEST_FLAG "-mavx2") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm256_add_epi16(__m256i(), __m256i()); - (void)a; - return 0; - } - " HAVE_AVX2) - if (HAVE_AVX2 AND ENABLE_AVX2) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + if (ENABLE_AVX) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx") endif () - set (TEST_FLAG "-mavx512f -mavx512bw -mavx512vl") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm512_setzero_epi32(); - (void)a; - auto b = _mm512_add_epi16(__m512i(), __m512i()); - (void)b; - auto c = _mm_cmp_epi8_mask(__m128i(), __m128i(), 0); - (void)c; - return 0; - } - " HAVE_AVX512) - if (HAVE_AVX512 AND ENABLE_AVX512) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") - endif () - - set (TEST_FLAG "-mavx512vbmi") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm512_permutexvar_epi8(__m512i(), __m512i()); - (void)a; - return 0; - } - " HAVE_AVX512_VBMI) - if (HAVE_AVX512 AND ENABLE_AVX512 AND HAVE_AVX512_VBMI AND ENABLE_AVX512_VBMI) - set (COMPILER_FLAGS 
"${COMPILER_FLAGS} ${TEST_FLAG}") - endif () - - set (TEST_FLAG "-mbmi") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _blsr_u32(0); - (void)a; - return 0; - } - " HAVE_BMI) - if (HAVE_BMI AND ENABLE_BMI) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") - endif () - - set (TEST_FLAG "-mbmi2") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _pdep_u64(0, 0); - (void)a; - return 0; - } - " HAVE_BMI2) - if (HAVE_BMI2 AND HAVE_AVX2 AND ENABLE_AVX2 AND ENABLE_BMI2) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") - endif () - - # Limit avx2/avx512 flag for specific source build - set (X86_INTRINSICS_FLAGS "") - if (ENABLE_AVX2_FOR_SPEC_OP) - if (HAVE_BMI) - set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi") + if (ENABLE_AVX2) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx2") + if (ENABLE_BMI2) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mbmi2") endif () - if (HAVE_AVX AND HAVE_AVX2) - set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx -mavx2") + endif () + + if (ENABLE_AVX512) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx512f -mavx512bw -mavx512vl") + if (ENABLE_AVX512_VBMI) + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx512vbmi") endif () endif () if (ENABLE_AVX512_FOR_SPEC_OP) - set (X86_INTRINSICS_FLAGS "") - if (HAVE_BMI) - set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi") - endif () - if (HAVE_AVX512) - set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mavx512vl -mprefer-vector-width=256") - endif () + set (X86_INTRINSICS_FLAGS "-mbmi -mavx512f -mavx512bw -mavx512vl -mprefer-vector-width=256") endif () + else () # RISC-V + exotic platforms endif () - -cmake_pop_check_state () diff --git a/cmake/darwin/default_libs.cmake b/cmake/darwin/default_libs.cmake index 42b8473cb75..cf0210d9b45 100644 --- a/cmake/darwin/default_libs.cmake +++ b/cmake/darwin/default_libs.cmake @@ -22,9 +22,3 @@ link_libraries(global-group) target_link_libraries(global-group INTERFACE $ ) - -# FIXME: remove when all contribs will get custom cmake lists -install( - TARGETS global-group global-libs - EXPORT global -) diff --git a/cmake/darwin/toolchain-aarch64.cmake b/cmake/darwin/toolchain-aarch64.cmake index 569b02bb642..178153c1098 100644 --- a/cmake/darwin/toolchain-aarch64.cmake +++ b/cmake/darwin/toolchain-aarch64.cmake @@ -9,9 +9,3 @@ set (CMAKE_ASM_COMPILER_TARGET "aarch64-apple-darwin") set (CMAKE_OSX_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/darwin-aarch64") set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/darwin/toolchain-x86_64.cmake b/cmake/darwin/toolchain-x86_64.cmake index c4527d2fc0d..b9cbe72a2b6 100644 --- a/cmake/darwin/toolchain-x86_64.cmake +++ b/cmake/darwin/toolchain-x86_64.cmake @@ -9,9 +9,3 @@ set (CMAKE_ASM_COMPILER_TARGET "x86_64-apple-darwin") set (CMAKE_OSX_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/darwin-x86_64") set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set 
(HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/freebsd/default_libs.cmake b/cmake/freebsd/default_libs.cmake index 65bf296ee09..1eeb1a872bd 100644 --- a/cmake/freebsd/default_libs.cmake +++ b/cmake/freebsd/default_libs.cmake @@ -25,9 +25,3 @@ link_libraries(global-group) target_link_libraries(global-group INTERFACE $ ) - -# FIXME: remove when all contribs will get custom cmake lists -install( - TARGETS global-group global-libs - EXPORT global -) diff --git a/cmake/freebsd/toolchain-aarch64.cmake b/cmake/freebsd/toolchain-aarch64.cmake index 8a8da00f3be..53b7856ed03 100644 --- a/cmake/freebsd/toolchain-aarch64.cmake +++ b/cmake/freebsd/toolchain-aarch64.cmake @@ -9,13 +9,3 @@ set (CMAKE_ASM_COMPILER_TARGET "aarch64-unknown-freebsd12") set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-aarch64") set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake - -# Will be changed later, but somehow needed to be set here. -set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/freebsd/toolchain-ppc64le.cmake b/cmake/freebsd/toolchain-ppc64le.cmake index c3f6594204d..bb23f0fbafc 100644 --- a/cmake/freebsd/toolchain-ppc64le.cmake +++ b/cmake/freebsd/toolchain-ppc64le.cmake @@ -9,13 +9,3 @@ set (CMAKE_ASM_COMPILER_TARGET "powerpc64le-unknown-freebsd13") set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-ppc64le") set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake - -# Will be changed later, but somehow needed to be set here. -set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/freebsd/toolchain-x86_64.cmake b/cmake/freebsd/toolchain-x86_64.cmake index 460de6a7d39..4635880b4a6 100644 --- a/cmake/freebsd/toolchain-x86_64.cmake +++ b/cmake/freebsd/toolchain-x86_64.cmake @@ -9,13 +9,3 @@ set (CMAKE_ASM_COMPILER_TARGET "x86_64-pc-freebsd11") set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-x86_64") set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake - -# Will be changed later, but somehow needed to be set here. 
-set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/fuzzer.cmake b/cmake/fuzzer.cmake index 52f301ab8ad..dd0c4b080fe 100644 --- a/cmake/fuzzer.cmake +++ b/cmake/fuzzer.cmake @@ -4,8 +4,8 @@ if (FUZZER) # NOTE: Eldar Zaitov decided to name it "libfuzzer" instead of "fuzzer" to keep in mind another possible fuzzer backends. # NOTE: no-link means that all the targets are built with instrumentation for fuzzer, but only some of them # (tests) have entry point for fuzzer and it's not checked. - set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} -fsanitize=fuzzer-no-link") - set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} -fsanitize=fuzzer-no-link") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} -fsanitize=fuzzer-no-link -DFUZZER=1") + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} -fsanitize=fuzzer-no-link -DFUZZER=1") # NOTE: oss-fuzz can change LIB_FUZZING_ENGINE variable if (NOT LIB_FUZZING_ENGINE) diff --git a/cmake/limit_jobs.cmake b/cmake/limit_jobs.cmake index 28ccb62e10c..8e48fc9b9d8 100644 --- a/cmake/limit_jobs.cmake +++ b/cmake/limit_jobs.cmake @@ -21,7 +21,7 @@ if (NOT PARALLEL_COMPILE_JOBS AND MAX_COMPILER_MEMORY) set (PARALLEL_COMPILE_JOBS 1) endif () if (PARALLEL_COMPILE_JOBS LESS NUMBER_OF_LOGICAL_CORES) - message(WARNING "The auto-calculated compile jobs limit (${PARALLEL_COMPILE_JOBS}) underutilizes CPU cores (${NUMBER_OF_LOGICAL_CORES}). Set PARALLEL_COMPILE_JOBS to override.") + message("The auto-calculated compile jobs limit (${PARALLEL_COMPILE_JOBS}) underutilizes CPU cores (${NUMBER_OF_LOGICAL_CORES}). Set PARALLEL_COMPILE_JOBS to override.") endif() endif () @@ -32,7 +32,7 @@ if (NOT PARALLEL_LINK_JOBS AND MAX_LINKER_MEMORY) set (PARALLEL_LINK_JOBS 1) endif () if (PARALLEL_LINK_JOBS LESS NUMBER_OF_LOGICAL_CORES) - message(WARNING "The auto-calculated link jobs limit (${PARALLEL_LINK_JOBS}) underutilizes CPU cores (${NUMBER_OF_LOGICAL_CORES}). Set PARALLEL_LINK_JOBS to override.") + message("The auto-calculated link jobs limit (${PARALLEL_LINK_JOBS}) underutilizes CPU cores (${NUMBER_OF_LOGICAL_CORES}). Set PARALLEL_LINK_JOBS to override.") endif() endif () diff --git a/cmake/linux/default_libs.cmake b/cmake/linux/default_libs.cmake index 56a663a708e..8552097fa57 100644 --- a/cmake/linux/default_libs.cmake +++ b/cmake/linux/default_libs.cmake @@ -50,9 +50,3 @@ target_link_libraries(global-group INTERFACE $ -Wl,--end-group ) - -# FIXME: remove when all contribs will get custom cmake lists -install( - TARGETS global-group global-libs - EXPORT global -) diff --git a/cmake/linux/toolchain-aarch64.cmake b/cmake/linux/toolchain-aarch64.cmake index 2dedef8859f..b80cc01296d 100644 --- a/cmake/linux/toolchain-aarch64.cmake +++ b/cmake/linux/toolchain-aarch64.cmake @@ -9,10 +9,6 @@ set (CMAKE_C_COMPILER_TARGET "aarch64-linux-gnu") set (CMAKE_CXX_COMPILER_TARGET "aarch64-linux-gnu") set (CMAKE_ASM_COMPILER_TARGET "aarch64-linux-gnu") -# Will be changed later, but somehow needed to be set here. 
-set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-aarch64") set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/aarch64-linux-gnu/libc") @@ -20,9 +16,3 @@ set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/aarch64-linux-gnu/libc") set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/linux/toolchain-ppc64le.cmake b/cmake/linux/toolchain-ppc64le.cmake index c46ea954b71..98e8f7e8489 100644 --- a/cmake/linux/toolchain-ppc64le.cmake +++ b/cmake/linux/toolchain-ppc64le.cmake @@ -9,10 +9,6 @@ set (CMAKE_C_COMPILER_TARGET "powerpc64le-linux-gnu") set (CMAKE_CXX_COMPILER_TARGET "powerpc64le-linux-gnu") set (CMAKE_ASM_COMPILER_TARGET "powerpc64le-linux-gnu") -# Will be changed later, but somehow needed to be set here. -set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-powerpc64le") set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/powerpc64le-linux-gnu/libc") @@ -20,9 +16,3 @@ set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/powerpc64le-linux-gnu/libc") set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/linux/toolchain-riscv64.cmake b/cmake/linux/toolchain-riscv64.cmake index 7f876f88d72..ae5a38f08eb 100644 --- a/cmake/linux/toolchain-riscv64.cmake +++ b/cmake/linux/toolchain-riscv64.cmake @@ -9,10 +9,6 @@ set (CMAKE_C_COMPILER_TARGET "riscv64-linux-gnu") set (CMAKE_CXX_COMPILER_TARGET "riscv64-linux-gnu") set (CMAKE_ASM_COMPILER_TARGET "riscv64-linux-gnu") -# Will be changed later, but somehow needed to be set here. -set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-riscv64") set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}") @@ -27,9 +23,3 @@ set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=bfd") # ld.lld: error: section size decrease is too large # But GNU BinUtils work. 
set (LINKER_NAME "riscv64-linux-gnu-ld.bfd" CACHE STRING "Linker name" FORCE) - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/linux/toolchain-s390x.cmake b/cmake/linux/toolchain-s390x.cmake index 945eb9affa4..d34329fb3bb 100644 --- a/cmake/linux/toolchain-s390x.cmake +++ b/cmake/linux/toolchain-s390x.cmake @@ -9,10 +9,6 @@ set (CMAKE_C_COMPILER_TARGET "s390x-linux-gnu") set (CMAKE_CXX_COMPILER_TARGET "s390x-linux-gnu") set (CMAKE_ASM_COMPILER_TARGET "s390x-linux-gnu") -# Will be changed later, but somehow needed to be set here. -set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-s390x") set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/s390x-linux-gnu/libc") @@ -23,9 +19,3 @@ set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=mold -Wl,-L${CMAKE_SYSROOT}/usr/lib64") set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=mold -Wl,-L${CMAKE_SYSROOT}/usr/lib64") set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=mold -Wl,-L${CMAKE_SYSROOT}/usr/lib64") - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/linux/toolchain-x86_64-musl.cmake b/cmake/linux/toolchain-x86_64-musl.cmake index bc327e5ac25..fa7b3eaf0d1 100644 --- a/cmake/linux/toolchain-x86_64-musl.cmake +++ b/cmake/linux/toolchain-x86_64-musl.cmake @@ -9,10 +9,6 @@ set (CMAKE_C_COMPILER_TARGET "x86_64-linux-musl") set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-musl") set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-musl") -# Will be changed later, but somehow needed to be set here. -set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64-musl") set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}") @@ -21,11 +17,5 @@ set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - set (USE_MUSL 1) add_definitions(-DUSE_MUSL=1) diff --git a/cmake/linux/toolchain-x86_64.cmake b/cmake/linux/toolchain-x86_64.cmake index 55b9df79f70..e341219a7e5 100644 --- a/cmake/linux/toolchain-x86_64.cmake +++ b/cmake/linux/toolchain-x86_64.cmake @@ -19,10 +19,6 @@ set (CMAKE_C_COMPILER_TARGET "x86_64-linux-gnu") set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-gnu") set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-gnu") -# Will be changed later, but somehow needed to be set here. 
-set (CMAKE_AR "ar") -set (CMAKE_RANLIB "ranlib") - set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64") set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/x86_64-linux-gnu/libc") @@ -32,9 +28,3 @@ set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") - -set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) - -set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE) -set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE) diff --git a/cmake/sanitize.cmake b/cmake/sanitize.cmake index f17283774eb..3f7a8498059 100644 --- a/cmake/sanitize.cmake +++ b/cmake/sanitize.cmake @@ -58,3 +58,27 @@ if (SANITIZE) message (FATAL_ERROR "Unknown sanitizer type: ${SANITIZE}") endif () endif() + +# Default coverage instrumentation (dumping the coverage map on exit) +option(WITH_COVERAGE "Instrumentation for code coverage with default implementation" OFF) + +if (WITH_COVERAGE) + message (INFORMATION "Enabled instrumentation for code coverage") + set(COVERAGE_FLAGS "-fprofile-instr-generate -fcoverage-mapping") +endif() + +option (SANITIZE_COVERAGE "Instrumentation for code coverage with custom callbacks" OFF) + +if (SANITIZE_COVERAGE) + message (INFORMATION "Enabled instrumentation for code coverage") + + # We set this define for whole build to indicate that at least some parts are compiled with coverage. + # And to expose it in system.build_options. + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DSANITIZE_COVERAGE=1") + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DSANITIZE_COVERAGE=1") + + # But the actual coverage will be enabled on per-library basis: for ClickHouse code, but not for 3rd-party. + set (COVERAGE_FLAGS "-fsanitize-coverage=trace-pc-guard,pc-table") +endif() + +set (WITHOUT_COVERAGE_FLAGS "-fno-profile-instr-generate -fno-coverage-mapping -fno-sanitize-coverage=trace-pc-guard,pc-table") diff --git a/cmake/split_debug_symbols.cmake b/cmake/split_debug_symbols.cmake index d6821eb6c48..67c2c386f20 100644 --- a/cmake/split_debug_symbols.cmake +++ b/cmake/split_debug_symbols.cmake @@ -1,3 +1,5 @@ +# Generates a separate file with debug symbols while stripping it from the main binary. +# This is needed for Debian packages. macro(clickhouse_split_debug_symbols) set(oneValueArgs TARGET DESTINATION_DIR BINARY_PATH) diff --git a/contrib/AMQP-CPP b/contrib/AMQP-CPP index 818c2d8ad96..00f09897ce0 160000 --- a/contrib/AMQP-CPP +++ b/contrib/AMQP-CPP @@ -1 +1 @@ -Subproject commit 818c2d8ad96a08a5d20fece7d1e1e8855a2b0860 +Subproject commit 00f09897ce020a84e38f87dc416af4a19c5da9ae diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt index 390b0241e7d..a8f0705df88 100644 --- a/contrib/CMakeLists.txt +++ b/contrib/CMakeLists.txt @@ -1,16 +1,7 @@ #"${folder}/CMakeLists.txt" Third-party libraries may have substandard code. 
-set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w") -set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w") - -if (WITH_COVERAGE) - set (WITHOUT_COVERAGE_LIST ${WITHOUT_COVERAGE}) - separate_arguments(WITHOUT_COVERAGE_LIST) - # disable coverage for contib files and build with optimisations - if (COMPILER_CLANG) - add_compile_options(-O3 -DNDEBUG -finline-functions -finline-hint-functions ${WITHOUT_COVERAGE_LIST}) - endif() -endif() +set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w -ffunction-sections -fdata-sections") +set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w -ffunction-sections -fdata-sections") if (SANITIZE STREQUAL "undefined") # 3rd-party libraries usually not intended to work with UBSan. @@ -53,6 +44,7 @@ else () endif () add_contrib (miniselect-cmake miniselect) add_contrib (pdqsort-cmake pdqsort) +add_contrib (pocketfft-cmake pocketfft) add_contrib (crc32-vpmsum-cmake crc32-vpmsum) add_contrib (sparsehash-c11-cmake sparsehash-c11) add_contrib (abseil-cpp-cmake abseil-cpp) diff --git a/contrib/NuRaft b/contrib/NuRaft index eb1572129c7..b7ea89b817a 160000 --- a/contrib/NuRaft +++ b/contrib/NuRaft @@ -1 +1 @@ -Subproject commit eb1572129c71beb2156dcdaadc3fb136954aed96 +Subproject commit b7ea89b817a18dc0eafc1f909d568869f02d2d04 diff --git a/contrib/abseil-cpp b/contrib/abseil-cpp index 5655528c418..3bd86026c93 160000 --- a/contrib/abseil-cpp +++ b/contrib/abseil-cpp @@ -1 +1 @@ -Subproject commit 5655528c41830f733160de4fb0b99073841bae9e +Subproject commit 3bd86026c93da5a40006fd53403dff9d5f5e30e3 diff --git a/contrib/abseil-cpp-cmake/CMakeLists.txt b/contrib/abseil-cpp-cmake/CMakeLists.txt index 2901daf32db..e6c3268c57a 100644 --- a/contrib/abseil-cpp-cmake/CMakeLists.txt +++ b/contrib/abseil-cpp-cmake/CMakeLists.txt @@ -1,33 +1,3428 @@ set(ABSL_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/abseil-cpp") +set(ABSL_COMMON_INCLUDE_DIRS "${ABSL_ROOT_DIR}") + +# +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +function(absl_cc_library) + cmake_parse_arguments(ABSL_CC_LIB + "DISABLE_INSTALL;PUBLIC;TESTONLY" + "NAME" + "HDRS;SRCS;COPTS;DEFINES;LINKOPTS;DEPS" + ${ARGN} + ) + + set(_NAME "absl_${ABSL_CC_LIB_NAME}") + + # Check if this is a header-only library + set(ABSL_CC_SRCS "${ABSL_CC_LIB_SRCS}") + foreach(src_file IN LISTS ABSL_CC_SRCS) + if(${src_file} MATCHES ".*\\.(h|inc)") + list(REMOVE_ITEM ABSL_CC_SRCS "${src_file}") + endif() + endforeach() + + if(ABSL_CC_SRCS STREQUAL "") + set(ABSL_CC_LIB_IS_INTERFACE 1) + else() + set(ABSL_CC_LIB_IS_INTERFACE 0) + endif() + + if(NOT ABSL_CC_LIB_IS_INTERFACE) + add_library(${_NAME} "") + target_sources(${_NAME} PRIVATE ${ABSL_CC_LIB_SRCS} ${ABSL_CC_LIB_HDRS}) + target_link_libraries(${_NAME} + PUBLIC ${ABSL_CC_LIB_DEPS} + PRIVATE + ${ABSL_CC_LIB_LINKOPTS} + ${ABSL_DEFAULT_LINKOPTS} + ) + + target_include_directories(${_NAME} + PUBLIC "${ABSL_COMMON_INCLUDE_DIRS}") + target_compile_options(${_NAME} + PRIVATE ${ABSL_CC_LIB_COPTS}) + target_compile_definitions(${_NAME} PUBLIC ${ABSL_CC_LIB_DEFINES}) + + else() + # Generating header-only library + add_library(${_NAME} INTERFACE) + target_include_directories(${_NAME} + INTERFACE "${ABSL_COMMON_INCLUDE_DIRS}") + + target_link_libraries(${_NAME} + INTERFACE + ${ABSL_CC_LIB_DEPS} + ${ABSL_CC_LIB_LINKOPTS} + ${ABSL_DEFAULT_LINKOPTS} + ) + target_compile_definitions(${_NAME} INTERFACE ${ABSL_CC_LIB_DEFINES}) + + endif() + + add_library(absl::${ABSL_CC_LIB_NAME} ALIAS ${_NAME}) +endfunction() + + +set(DIR ${ABSL_ROOT_DIR}/absl/algorithm) + +absl_cc_library( + NAME + algorithm + HDRS + "${DIR}/algorithm.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + PUBLIC +) + +absl_cc_library( + NAME + algorithm_container + HDRS + "${DIR}/container.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::algorithm + absl::core_headers + absl::meta + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/base) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + atomic_hook + HDRS + "${DIR}/internal/atomic_hook.h" + DEPS + absl::config + absl::core_headers + COPTS + ${ABSL_DEFAULT_COPTS} +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + errno_saver + HDRS + "${DIR}/internal/errno_saver.h" + DEPS + absl::config + COPTS + ${ABSL_DEFAULT_COPTS} +) + +absl_cc_library( + NAME + log_severity + HDRS + "${DIR}/log_severity.h" + SRCS + "${DIR}/log_severity.cc" + DEPS + absl::config + absl::core_headers + COPTS + ${ABSL_DEFAULT_COPTS} +) + +absl_cc_library( + NAME + nullability + HDRS + "${DIR}/nullability.h" + SRCS + "${DIR}/internal/nullability_impl.h" + DEPS + absl::core_headers + absl::type_traits + COPTS + ${ABSL_DEFAULT_COPTS} +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + raw_logging_internal + HDRS + "${DIR}/internal/raw_logging.h" + SRCS + "${DIR}/internal/raw_logging.cc" + DEPS + absl::atomic_hook + absl::config + absl::core_headers + absl::errno_saver + absl::log_severity + COPTS + ${ABSL_DEFAULT_COPTS} +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + spinlock_wait + HDRS + "${DIR}/internal/spinlock_wait.h" + SRCS + "${DIR}/internal/spinlock_akaros.inc" + "${DIR}/internal/spinlock_linux.inc" + "${DIR}/internal/spinlock_posix.inc" + "${DIR}/internal/spinlock_wait.cc" + "${DIR}/internal/spinlock_win32.inc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base_internal + absl::core_headers + absl::errno_saver +) + +absl_cc_library( + NAME + config + HDRS + "${DIR}/config.h" + "${DIR}/options.h" + "${DIR}/policy_checks.h" + COPTS + ${ABSL_DEFAULT_COPTS} + PUBLIC +) + +absl_cc_library( + NAME + dynamic_annotations + HDRS + "${DIR}/dynamic_annotations.h" + SRCS + "${DIR}/internal/dynamic_annotations.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + PUBLIC +) + +absl_cc_library( + NAME + core_headers + HDRS + "${DIR}/attributes.h" + "${DIR}/const_init.h" + "${DIR}/macros.h" + "${DIR}/optimization.h" + "${DIR}/port.h" + "${DIR}/thread_annotations.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + malloc_internal + HDRS + "${DIR}/internal/direct_mmap.h" + "${DIR}/internal/low_level_alloc.h" + SRCS + "${DIR}/internal/low_level_alloc.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::base_internal + absl::config + absl::core_headers + absl::dynamic_annotations + absl::raw_logging_internal + Threads::Threads +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + base_internal + HDRS + "${DIR}/internal/hide_ptr.h" + "${DIR}/internal/identity.h" + "${DIR}/internal/inline_variable.h" + "${DIR}/internal/invoke.h" + "${DIR}/internal/scheduling_mode.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::type_traits +) + +absl_cc_library( + NAME + base + HDRS + "${DIR}/call_once.h" + "${DIR}/casts.h" + "${DIR}/internal/cycleclock.h" + "${DIR}/internal/cycleclock_config.h" + "${DIR}/internal/low_level_scheduling.h" + "${DIR}/internal/per_thread_tls.h" + "${DIR}/internal/spinlock.h" + "${DIR}/internal/sysinfo.h" + "${DIR}/internal/thread_identity.h" + "${DIR}/internal/tsan_mutex_interface.h" + "${DIR}/internal/unscaledcycleclock.h" + "${DIR}/internal/unscaledcycleclock_config.h" + SRCS + "${DIR}/internal/cycleclock.cc" + "${DIR}/internal/spinlock.cc" + "${DIR}/internal/sysinfo.cc" + "${DIR}/internal/thread_identity.cc" + "${DIR}/internal/unscaledcycleclock.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::atomic_hook + absl::base_internal + absl::config + absl::core_headers + absl::dynamic_annotations + absl::log_severity + absl::raw_logging_internal + absl::spinlock_wait + absl::type_traits + Threads::Threads + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + throw_delegate + HDRS + "${DIR}/internal/throw_delegate.h" + SRCS + "${DIR}/internal/throw_delegate.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::raw_logging_internal +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + pretty_function + HDRS + "${DIR}/internal/pretty_function.h" + COPTS + ${ABSL_DEFAULT_COPTS} +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + endian + HDRS + "${DIR}/internal/endian.h" + "${DIR}/internal/unaligned_access.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::core_headers + PUBLIC +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + scoped_set_env + SRCS + "${DIR}/internal/scoped_set_env.cc" + HDRS + "${DIR}/internal/scoped_set_env.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::raw_logging_internal +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + strerror + SRCS + "${DIR}/internal/strerror.cc" + HDRS + "${DIR}/internal/strerror.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::errno_saver +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + fast_type_id + HDRS + "${DIR}/internal/fast_type_id.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config +) + +absl_cc_library( + NAME + prefetch + HDRS + "${DIR}/prefetch.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers +) + +set(DIR ${ABSL_ROOT_DIR}/absl/cleanup) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cleanup_internal + HDRS + "${DIR}/internal/cleanup.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base_internal + absl::core_headers + absl::utility + PUBLIC +) + +absl_cc_library( + NAME + cleanup + HDRS + "${DIR}/cleanup.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::cleanup_internal + absl::config + absl::core_headers + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/container) + +absl_cc_library( + NAME + btree + HDRS + "${DIR}/btree_map.h" + "${DIR}/btree_set.h" + "${DIR}/internal/btree.h" + "${DIR}/internal/btree_container.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::container_common + absl::common_policy_traits + absl::compare + absl::compressed_tuple + absl::container_memory + absl::cord + absl::core_headers + absl::layout + absl::memory + absl::raw_logging_internal + absl::strings + absl::throw_delegate + absl::type_traits + absl::utility +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + compressed_tuple + HDRS + "${DIR}/internal/compressed_tuple.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::utility + PUBLIC +) + +absl_cc_library( + NAME + fixed_array + HDRS + "${DIR}/fixed_array.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::compressed_tuple + absl::algorithm + absl::config + absl::core_headers + absl::dynamic_annotations + absl::throw_delegate + absl::memory + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + inlined_vector_internal + HDRS + "${DIR}/internal/inlined_vector.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::compressed_tuple + absl::core_headers + absl::memory + absl::span + absl::type_traits + PUBLIC +) + +absl_cc_library( + NAME + inlined_vector + HDRS + "${DIR}/inlined_vector.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::algorithm + absl::core_headers + absl::inlined_vector_internal + absl::throw_delegate + absl::memory + absl::type_traits + PUBLIC +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + counting_allocator + HDRS + "${DIR}/internal/counting_allocator.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config +) + +absl_cc_library( + NAME + flat_hash_map + HDRS + "${DIR}/flat_hash_map.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::container_memory + absl::core_headers + absl::hash_function_defaults + absl::raw_hash_map + absl::algorithm_container + absl::memory + PUBLIC +) + +absl_cc_library( + NAME + flat_hash_set + HDRS + "${DIR}/flat_hash_set.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::container_memory + absl::hash_function_defaults + absl::raw_hash_set + absl::algorithm_container + absl::core_headers + absl::memory + PUBLIC +) + +absl_cc_library( + NAME + node_hash_map + HDRS + "${DIR}/node_hash_map.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::container_memory + absl::core_headers + absl::hash_function_defaults + absl::node_slot_policy + absl::raw_hash_map + absl::algorithm_container + absl::memory + PUBLIC +) + +absl_cc_library( + NAME + node_hash_set + HDRS + "${DIR}/node_hash_set.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + absl::hash_function_defaults + absl::node_slot_policy + absl::raw_hash_set + absl::algorithm_container + absl::memory + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + container_memory + HDRS + "${DIR}/internal/container_memory.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::memory + absl::type_traits + absl::utility + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + hash_function_defaults + HDRS + "${DIR}/internal/hash_function_defaults.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::cord + absl::hash + absl::strings + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + hash_policy_traits + HDRS + "${DIR}/internal/hash_policy_traits.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::common_policy_traits + absl::meta + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + common_policy_traits + HDRS + "${DIR}/internal/common_policy_traits.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::meta + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + hashtablez_sampler + HDRS + "${DIR}/internal/hashtablez_sampler.h" + SRCS + "${DIR}/internal/hashtablez_sampler.cc" + "${DIR}/internal/hashtablez_sampler_force_weak_definition.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::exponential_biased + absl::raw_logging_internal + absl::sample_recorder + absl::synchronization + absl::time +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + hashtable_debug + HDRS + "${DIR}/internal/hashtable_debug.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::hashtable_debug_hooks +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + hashtable_debug_hooks + HDRS + "${DIR}/internal/hashtable_debug_hooks.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + node_slot_policy + HDRS + "${DIR}/internal/node_slot_policy.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + PUBLIC +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + raw_hash_map + HDRS + "${DIR}/internal/raw_hash_map.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::container_memory + absl::raw_hash_set + absl::throw_delegate + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + container_common + HDRS + "${DIR}/internal/common.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + raw_hash_set + HDRS + "${DIR}/internal/raw_hash_set.h" + SRCS + "${DIR}/internal/raw_hash_set.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::bits + absl::compressed_tuple + absl::config + absl::container_common + absl::container_memory + absl::core_headers + absl::dynamic_annotations + absl::endian + absl::hash + absl::hash_policy_traits + absl::hashtable_debug_hooks + absl::hashtablez_sampler + absl::memory + absl::meta + absl::optional + absl::prefetch + absl::raw_logging_internal + absl::utility + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + layout + HDRS + "${DIR}/internal/layout.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + absl::meta + absl::strings + absl::span + absl::utility + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/crc) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + crc_cpu_detect + HDRS + "${DIR}/internal/cpu_detect.h" + SRCS + "${DIR}/internal/cpu_detect.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + crc_internal + HDRS + "${DIR}/internal/crc.h" + "${DIR}/internal/crc32_x86_arm_combined_simd.h" + SRCS + "${DIR}/internal/crc.cc" + "${DIR}/internal/crc_internal.h" + "${DIR}/internal/crc_x86_arm_combined.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::crc_cpu_detect + absl::config + absl::core_headers + absl::endian + absl::prefetch + absl::raw_logging_internal + absl::memory + absl::bits +) + +absl_cc_library( + NAME + crc32c + HDRS + "${DIR}/crc32c.h" + "${DIR}/internal/crc32c.h" + "${DIR}/internal/crc_memcpy.h" + SRCS + "${DIR}/crc32c.cc" + "${DIR}/internal/crc32c_inline.h" + "${DIR}/internal/crc_memcpy_fallback.cc" + "${DIR}/internal/crc_memcpy_x86_arm_combined.cc" + "${DIR}/internal/crc_non_temporal_memcpy.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::crc_cpu_detect + absl::crc_internal + absl::non_temporal_memcpy + absl::config + absl::core_headers + absl::endian + absl::prefetch + absl::str_format + absl::strings +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + non_temporal_arm_intrinsics + HDRS + "${DIR}/internal/non_temporal_arm_intrinsics.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + non_temporal_memcpy + HDRS + "${DIR}/internal/non_temporal_memcpy.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::non_temporal_arm_intrinsics + absl::config + absl::core_headers +) + +absl_cc_library( + NAME + crc_cord_state + HDRS + "${DIR}/internal/crc_cord_state.h" + SRCS + "${DIR}/internal/crc_cord_state.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::crc32c + absl::config + absl::strings +) + +set(DIR ${ABSL_ROOT_DIR}/absl/debugging) + +absl_cc_library( + NAME + stacktrace + HDRS + "${DIR}/stacktrace.h" + "${DIR}/internal/stacktrace_aarch64-inl.inc" + "${DIR}/internal/stacktrace_arm-inl.inc" + "${DIR}/internal/stacktrace_config.h" + "${DIR}/internal/stacktrace_emscripten-inl.inc" + "${DIR}/internal/stacktrace_generic-inl.inc" + "${DIR}/internal/stacktrace_powerpc-inl.inc" + "${DIR}/internal/stacktrace_riscv-inl.inc" + "${DIR}/internal/stacktrace_unimplemented-inl.inc" + "${DIR}/internal/stacktrace_win32-inl.inc" + "${DIR}/internal/stacktrace_x86-inl.inc" + SRCS + "${DIR}/stacktrace.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::debugging_internal + absl::config + absl::core_headers + absl::dynamic_annotations + absl::raw_logging_internal + PUBLIC +) + +absl_cc_library( + NAME + symbolize + HDRS + "${DIR}/symbolize.h" + "${DIR}/internal/symbolize.h" + SRCS + "${DIR}/symbolize.cc" + "${DIR}/symbolize_darwin.inc" + "${DIR}/symbolize_elf.inc" + "${DIR}/symbolize_emscripten.inc" + "${DIR}/symbolize_unimplemented.inc" + "${DIR}/symbolize_win32.inc" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::debugging_internal + absl::demangle_internal + absl::base + absl::config + absl::core_headers + absl::dynamic_annotations + absl::malloc_internal + absl::raw_logging_internal + absl::strings + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + examine_stack + HDRS + "${DIR}/internal/examine_stack.h" + SRCS + "${DIR}/internal/examine_stack.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::stacktrace + absl::symbolize + absl::config + absl::core_headers + absl::raw_logging_internal +) + +absl_cc_library( + NAME + failure_signal_handler + HDRS + "${DIR}/failure_signal_handler.h" + SRCS + "${DIR}/failure_signal_handler.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::examine_stack + absl::stacktrace + absl::base + absl::config + absl::core_headers + absl::raw_logging_internal + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + debugging_internal + HDRS + "${DIR}/internal/address_is_readable.h" + "${DIR}/internal/elf_mem_image.h" + "${DIR}/internal/vdso_support.h" + SRCS + "${DIR}/internal/address_is_readable.cc" + "${DIR}/internal/elf_mem_image.cc" + "${DIR}/internal/vdso_support.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + absl::config + absl::dynamic_annotations + absl::errno_saver + absl::raw_logging_internal +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + demangle_internal + HDRS + "${DIR}/internal/demangle.h" + SRCS + "${DIR}/internal/demangle.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::core_headers + PUBLIC +) + +absl_cc_library( + NAME + leak_check + HDRS + "${DIR}/leak_check.h" + SRCS + "${DIR}/leak_check.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + PUBLIC +) + +# component target +absl_cc_library( + NAME + debugging + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::stacktrace + absl::leak_check + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/flags) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + flags_path_util + HDRS + "${DIR}/internal/path_util.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::strings + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + flags_program_name + SRCS + "${DIR}/internal/program_name.cc" + HDRS + "${DIR}/internal/program_name.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::flags_path_util + absl::strings + absl::synchronization + PUBLIC +) + +absl_cc_library( + NAME + flags_config + SRCS + "${DIR}/usage_config.cc" + HDRS + "${DIR}/config.h" + "${DIR}/usage_config.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::flags_path_util + absl::flags_program_name + absl::core_headers + absl::strings + absl::synchronization +) + +absl_cc_library( + NAME + flags_marshalling + SRCS + "${DIR}/marshalling.cc" + HDRS + "${DIR}/marshalling.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log_severity + absl::int128 + absl::optional + absl::strings + absl::str_format +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + flags_commandlineflag_internal + SRCS + "${DIR}/internal/commandlineflag.cc" + HDRS + "${DIR}/internal/commandlineflag.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::dynamic_annotations + absl::fast_type_id +) + +absl_cc_library( + NAME + flags_commandlineflag + SRCS + "${DIR}/commandlineflag.cc" + HDRS + "${DIR}/commandlineflag.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::fast_type_id + absl::flags_commandlineflag_internal + absl::optional + absl::strings +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + flags_private_handle_accessor + SRCS + "${DIR}/internal/private_handle_accessor.cc" + HDRS + "${DIR}/internal/private_handle_accessor.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::flags_commandlineflag + absl::flags_commandlineflag_internal + absl::strings +) + +absl_cc_library( + NAME + flags_reflection + SRCS + "${DIR}/reflection.cc" + HDRS + "${DIR}/reflection.h" + "${DIR}/internal/registry.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::flags_commandlineflag + absl::flags_private_handle_accessor + absl::flags_config + absl::strings + absl::synchronization + absl::flat_hash_map +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + flags_internal + SRCS + "${DIR}/internal/flag.cc" + HDRS + "${DIR}/internal/flag.h" + "${DIR}/internal/sequence_lock.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::base + absl::config + absl::flags_commandlineflag + absl::flags_commandlineflag_internal + absl::flags_config + absl::flags_marshalling + absl::synchronization + absl::meta + absl::utility + PUBLIC +) + +absl_cc_library( + NAME + flags + SRCS + "${DIR}/flag.cc" + HDRS + "${DIR}/declare.h" + "${DIR}/flag.h" + "${DIR}/internal/flag_msvc.inc" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::flags_commandlineflag + absl::flags_config + absl::flags_internal + absl::flags_reflection + absl::base + absl::core_headers + absl::strings +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + flags_usage_internal + SRCS + "${DIR}/internal/usage.cc" + HDRS + "${DIR}/internal/usage.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::flags_config + absl::flags + absl::flags_commandlineflag + absl::flags_internal + absl::flags_path_util + absl::flags_private_handle_accessor + absl::flags_program_name + absl::flags_reflection + absl::strings + absl::synchronization +) + +absl_cc_library( + NAME + flags_usage + SRCS + "${DIR}/usage.cc" + HDRS + "${DIR}/usage.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::flags_usage_internal + absl::raw_logging_internal + absl::strings + absl::synchronization +) + +absl_cc_library( + NAME + flags_parse + SRCS + "${DIR}/parse.cc" + HDRS + "${DIR}/internal/parse.h" + "${DIR}/parse.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::algorithm_container + absl::config + absl::core_headers + absl::flags_config + absl::flags + absl::flags_commandlineflag + absl::flags_commandlineflag_internal + absl::flags_internal + absl::flags_private_handle_accessor + absl::flags_program_name + absl::flags_reflection + absl::flags_usage + absl::strings + absl::synchronization +) + +set(DIR ${ABSL_ROOT_DIR}/absl/functional) + +absl_cc_library( + NAME + any_invocable + SRCS + "${DIR}/internal/any_invocable.h" + HDRS + "${DIR}/any_invocable.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base_internal + absl::config + absl::core_headers + absl::type_traits + absl::utility + PUBLIC +) + +absl_cc_library( + NAME + bind_front + SRCS + "${DIR}/internal/front_binder.h" + HDRS + "${DIR}/bind_front.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base_internal + absl::compressed_tuple + PUBLIC +) + +absl_cc_library( + NAME + function_ref + SRCS + "${DIR}/internal/function_ref.h" + HDRS + "${DIR}/function_ref.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base_internal + absl::core_headers + absl::any_invocable + absl::meta + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/hash) + +absl_cc_library( + NAME + hash + HDRS + "${DIR}/hash.h" + SRCS + "${DIR}/internal/hash.cc" + "${DIR}/internal/hash.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::bits + absl::city + absl::config + absl::core_headers + absl::endian + absl::fixed_array + absl::function_ref + absl::meta + absl::int128 + absl::strings + absl::optional + absl::variant + absl::utility + absl::low_level_hash + PUBLIC +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + city + HDRS + "${DIR}/internal/city.h" + SRCS + "${DIR}/internal/city.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + absl::endian +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + low_level_hash + HDRS + "${DIR}/internal/low_level_hash.h" + SRCS + "${DIR}/internal/low_level_hash.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::endian + absl::int128 + absl::prefetch +) + +set(DIR ${ABSL_ROOT_DIR}/absl/log) + +# Internal targets +absl_cc_library( + NAME + log_internal_check_impl + SRCS + HDRS + "${DIR}/internal/check_impl.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::core_headers + absl::log_internal_check_op + absl::log_internal_conditions + absl::log_internal_message + absl::log_internal_strip +) + +absl_cc_library( + NAME + log_internal_check_op + SRCS + "${DIR}/internal/check_op.cc" + HDRS + "${DIR}/internal/check_op.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log_internal_nullguard + absl::log_internal_nullstream + absl::log_internal_strip + absl::strings +) + +absl_cc_library( + NAME + log_internal_conditions + SRCS + "${DIR}/internal/conditions.cc" + HDRS + "${DIR}/internal/conditions.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::base + absl::config + absl::core_headers + absl::log_internal_voidify +) + +absl_cc_library( + NAME + log_internal_config + SRCS + HDRS + "${DIR}/internal/config.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers +) + +absl_cc_library( + NAME + log_internal_flags + SRCS + HDRS + "${DIR}/internal/flags.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::flags +) + +absl_cc_library( + NAME + log_internal_format + SRCS + "${DIR}/internal/log_format.cc" + HDRS + "${DIR}/internal/log_format.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log_internal_append_truncated + absl::log_internal_config + absl::log_internal_globals + absl::log_severity + absl::strings + absl::str_format + absl::time + absl::span +) + +absl_cc_library( + NAME + log_internal_globals + SRCS + "${DIR}/internal/globals.cc" + HDRS + "${DIR}/internal/globals.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log_severity + absl::raw_logging_internal + absl::strings + absl::time +) + +absl_cc_library( + NAME + log_internal_log_impl + SRCS + HDRS + "${DIR}/internal/log_impl.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::log_internal_conditions + absl::log_internal_message + absl::log_internal_strip +) + +absl_cc_library( + NAME + log_internal_proto + SRCS + "${DIR}/internal/proto.cc" + HDRS + "${DIR}/internal/proto.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::base + absl::config + absl::core_headers + absl::strings + absl::span +) + +absl_cc_library( + NAME + log_internal_message + SRCS + "${DIR}/internal/log_message.cc" + HDRS + "${DIR}/internal/log_message.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::base + absl::config + absl::core_headers + absl::errno_saver + absl::inlined_vector + absl::examine_stack + absl::log_internal_append_truncated + 
absl::log_internal_format + absl::log_internal_globals + absl::log_internal_proto + absl::log_internal_log_sink_set + absl::log_internal_nullguard + absl::log_globals + absl::log_entry + absl::log_severity + absl::log_sink + absl::log_sink_registry + absl::memory + absl::raw_logging_internal + absl::strings + absl::strerror + absl::time + absl::span +) + +absl_cc_library( + NAME + log_internal_log_sink_set + SRCS + "${DIR}/internal/log_sink_set.cc" + HDRS + "${DIR}/internal/log_sink_set.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + $<$<BOOL:${ANDROID}>:-llog> + DEPS + absl::base + absl::cleanup + absl::config + absl::core_headers + absl::log_internal_config + absl::log_internal_globals + absl::log_globals + absl::log_entry + absl::log_severity + absl::log_sink + absl::raw_logging_internal + absl::synchronization + absl::span + absl::strings +) + +absl_cc_library( + NAME + log_internal_nullguard + SRCS + "${DIR}/internal/nullguard.cc" + HDRS + "${DIR}/internal/nullguard.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers +) + +absl_cc_library( + NAME + log_internal_nullstream + SRCS + HDRS + "${DIR}/internal/nullstream.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log_severity + absl::strings +) + +absl_cc_library( + NAME + log_internal_strip + SRCS + HDRS + "${DIR}/internal/strip.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::log_internal_message + absl::log_internal_nullstream + absl::log_severity +) + +absl_cc_library( + NAME + log_internal_voidify + SRCS + HDRS + "${DIR}/internal/voidify.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config +) + +absl_cc_library( + NAME + log_internal_append_truncated + SRCS + HDRS + "${DIR}/internal/append_truncated.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::strings + absl::span +) + +# Public targets +absl_cc_library( + NAME + absl_check + SRCS + HDRS + "${DIR}/absl_check.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::log_internal_check_impl + PUBLIC +) + +absl_cc_library( + NAME + absl_log + SRCS + HDRS + "${DIR}/absl_log.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::log_internal_log_impl + PUBLIC +) + +absl_cc_library( + NAME + check + SRCS + HDRS + "${DIR}/check.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::log_internal_check_impl + absl::core_headers + absl::log_internal_check_op + absl::log_internal_conditions + absl::log_internal_message + absl::log_internal_strip + PUBLIC +) + +absl_cc_library( + NAME + die_if_null + SRCS + "${DIR}/die_if_null.cc" + HDRS + "${DIR}/die_if_null.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log + absl::strings + PUBLIC +) + +absl_cc_library( + NAME + log_flags + SRCS + "${DIR}/flags.cc" + HDRS + "${DIR}/flags.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log_globals + absl::log_severity + absl::log_internal_config + absl::log_internal_flags + absl::flags + absl::flags_marshalling + absl::strings + PUBLIC +) + +absl_cc_library( + NAME + log_globals + SRCS + "${DIR}/globals.cc" + HDRS + "${DIR}/globals.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS +
${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::atomic_hook + absl::config + absl::core_headers + absl::hash + absl::log_severity + absl::raw_logging_internal + absl::strings +) + +absl_cc_library( + NAME + log_initialize + SRCS + "${DIR}/initialize.cc" + HDRS + "${DIR}/initialize.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::log_globals + absl::log_internal_globals + absl::time + PUBLIC +) + +absl_cc_library( + NAME + log + SRCS + HDRS + "${DIR}/log.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::log_internal_log_impl + PUBLIC +) + +absl_cc_library( + NAME + log_entry + SRCS + "${DIR}/log_entry.cc" + HDRS + "${DIR}/log_entry.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::core_headers + absl::log_internal_config + absl::log_severity + absl::span + absl::strings + absl::time + PUBLIC +) + +absl_cc_library( + NAME + log_sink + SRCS + "${DIR}/log_sink.cc" + HDRS + "${DIR}/log_sink.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::log_entry + PUBLIC +) + +absl_cc_library( + NAME + log_sink_registry + SRCS + HDRS + "${DIR}/log_sink_registry.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::log_sink + absl::log_internal_log_sink_set + PUBLIC +) + +absl_cc_library( + NAME + log_streamer + SRCS + HDRS + "${DIR}/log_streamer.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::absl_log + absl::log_severity + absl::optional + absl::strings + absl::strings_internal + absl::utility + PUBLIC +) + +absl_cc_library( + NAME + log_internal_structured + HDRS + "${DIR}/internal/structured.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::log_internal_message + absl::strings +) + +absl_cc_library( + NAME + log_structured + HDRS + "${DIR}/structured.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::log_internal_structured + absl::strings + PUBLIC +) + +absl_cc_library( + NAME + log_internal_fnmatch + SRCS + "${DIR}/internal/fnmatch.cc" + HDRS + "${DIR}/internal/fnmatch.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::strings +) + +set(DIR ${ABSL_ROOT_DIR}/absl/memory) + +absl_cc_library( + NAME + memory + HDRS + "${DIR}/memory.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + absl::meta + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/meta) + +absl_cc_library( + NAME + type_traits + HDRS + "${DIR}/type_traits.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + PUBLIC +) + +# component target +absl_cc_library( + NAME + meta + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::type_traits + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/numeric) + +absl_cc_library( + NAME + bits + HDRS + "${DIR}/bits.h" + "${DIR}/internal/bits.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + PUBLIC +) + +absl_cc_library( + NAME + int128 + HDRS + "${DIR}/int128.h" + SRCS + "${DIR}/int128.cc" + "${DIR}/int128_have_intrinsic.inc" + "${DIR}/int128_no_intrinsic.inc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + absl::bits + PUBLIC +) + +# component target +absl_cc_library( + NAME + numeric + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::int128 + PUBLIC +) + +absl_cc_library( + NAME + numeric_representation + 
HDRS + "${DIR}/internal/representation.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + PUBLIC +) + +absl_cc_library( + NAME + sample_recorder + HDRS + "${DIR}/internal/sample_recorder.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::synchronization +) + +set(DIR ${ABSL_ROOT_DIR}/absl/profiling) + +absl_cc_library( + NAME + exponential_biased + SRCS + "${DIR}/internal/exponential_biased.cc" + HDRS + "${DIR}/internal/exponential_biased.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers +) + +absl_cc_library( + NAME + periodic_sampler + SRCS + "${DIR}/internal/periodic_sampler.cc" + HDRS + "${DIR}/internal/periodic_sampler.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + absl::exponential_biased +) + +set(DIR ${ABSL_ROOT_DIR}/absl/random) + +absl_cc_library( + NAME + random_random + HDRS + "${DIR}/random.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::random_distributions + absl::random_internal_nonsecure_base + absl::random_internal_pcg_engine + absl::random_internal_pool_urbg + absl::random_internal_randen_engine + absl::random_seed_sequences +) + +absl_cc_library( + NAME + random_bit_gen_ref + HDRS + "${DIR}/bit_gen_ref.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::core_headers + absl::random_internal_distribution_caller + absl::random_internal_fast_uniform_bits + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_mock_helpers + HDRS + "${DIR}/internal/mock_helpers.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::fast_type_id + absl::optional +) + +absl_cc_library( + NAME + random_distributions + SRCS + "${DIR}/discrete_distribution.cc" + "${DIR}/gaussian_distribution.cc" + HDRS + "${DIR}/bernoulli_distribution.h" + "${DIR}/beta_distribution.h" + "${DIR}/discrete_distribution.h" + "${DIR}/distributions.h" + "${DIR}/exponential_distribution.h" + "${DIR}/gaussian_distribution.h" + "${DIR}/log_uniform_int_distribution.h" + "${DIR}/poisson_distribution.h" + "${DIR}/uniform_int_distribution.h" + "${DIR}/uniform_real_distribution.h" + "${DIR}/zipf_distribution.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::base_internal + absl::config + absl::core_headers + absl::random_internal_generate_real + absl::random_internal_distribution_caller + absl::random_internal_fast_uniform_bits + absl::random_internal_fastmath + absl::random_internal_iostream_state_saver + absl::random_internal_traits + absl::random_internal_uniform_helper + absl::random_internal_wide_multiply + absl::strings + absl::type_traits +) + +absl_cc_library( + NAME + random_seed_gen_exception + SRCS + "${DIR}/seed_gen_exception.cc" + HDRS + "${DIR}/seed_gen_exception.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config +) + +absl_cc_library( + NAME + random_seed_sequences + SRCS + "${DIR}/seed_sequences.cc" + HDRS + "${DIR}/seed_sequences.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::inlined_vector + absl::random_internal_pool_urbg + absl::random_internal_salted_seed_seq + absl::random_internal_seed_material + absl::random_seed_gen_exception + absl::span +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + random_internal_traits + HDRS + "${DIR}/internal/traits.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_distribution_caller + HDRS + "${DIR}/internal/distribution_caller.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::utility + absl::fast_type_id +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_fast_uniform_bits + HDRS + "${DIR}/internal/fast_uniform_bits.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_seed_material + SRCS + "${DIR}/internal/seed_material.cc" + HDRS + "${DIR}/internal/seed_material.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::core_headers + absl::optional + absl::random_internal_fast_uniform_bits + absl::raw_logging_internal + absl::span + absl::strings +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_pool_urbg + SRCS + "${DIR}/internal/pool_urbg.cc" + HDRS + "${DIR}/internal/pool_urbg.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::base + absl::config + absl::core_headers + absl::endian + absl::random_internal_randen + absl::random_internal_seed_material + absl::random_internal_traits + absl::random_seed_gen_exception + absl::raw_logging_internal + absl::span +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_salted_seed_seq + HDRS + "${DIR}/internal/salted_seed_seq.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::inlined_vector + absl::optional + absl::span + absl::random_internal_seed_material + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_iostream_state_saver + HDRS + "${DIR}/internal/iostream_state_saver.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::int128 + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_generate_real + HDRS + "${DIR}/internal/generate_real.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::bits + absl::random_internal_fastmath + absl::random_internal_traits + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_wide_multiply + HDRS + "${DIR}/internal/wide_multiply.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::bits + absl::config + absl::int128 +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_fastmath + HDRS + "${DIR}/internal/fastmath.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::bits +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + random_internal_nonsecure_base + HDRS + "${DIR}/internal/nonsecure_base.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::core_headers + absl::inlined_vector + absl::random_internal_pool_urbg + absl::random_internal_salted_seed_seq + absl::random_internal_seed_material + absl::span + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_pcg_engine + HDRS + "${DIR}/internal/pcg_engine.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::int128 + absl::random_internal_fastmath + absl::random_internal_iostream_state_saver + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_randen_engine + HDRS + "${DIR}/internal/randen_engine.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::endian + absl::random_internal_iostream_state_saver + absl::random_internal_randen + absl::raw_logging_internal + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_platform + HDRS + "${DIR}/internal/randen_traits.h" + "${DIR}/internal/platform.h" + SRCS + "${DIR}/internal/randen_round_keys.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_randen + SRCS + "${DIR}/internal/randen.cc" + HDRS + "${DIR}/internal/randen.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::random_internal_platform + absl::random_internal_randen_hwaes + absl::random_internal_randen_slow +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_randen_slow + SRCS + "${DIR}/internal/randen_slow.cc" + HDRS + "${DIR}/internal/randen_slow.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::random_internal_platform + absl::config +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_randen_hwaes + SRCS + "${DIR}/internal/randen_detect.cc" + HDRS + "${DIR}/internal/randen_detect.h" + "${DIR}/internal/randen_hwaes.h" + COPTS + ${ABSL_DEFAULT_COPTS} + ${ABSL_RANDOM_RANDEN_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::random_internal_platform + absl::random_internal_randen_hwaes_impl + absl::config +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + random_internal_randen_hwaes_impl + SRCS + "${DIR}/internal/randen_hwaes.cc" + "${DIR}/internal/randen_hwaes.h" + COPTS + ${ABSL_DEFAULT_COPTS} + ${ABSL_RANDOM_RANDEN_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::random_internal_platform + absl::config +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + random_internal_uniform_helper + HDRS + "${DIR}/internal/uniform_helper.h" + COPTS + ${ABSL_DEFAULT_COPTS} + LINKOPTS + ${ABSL_DEFAULT_LINKOPTS} + DEPS + absl::config + absl::random_internal_traits + absl::type_traits +) + +set(DIR ${ABSL_ROOT_DIR}/absl/status) + +absl_cc_library( + NAME + status + HDRS + "${DIR}/status.h" + SRCS + "${DIR}/internal/status_internal.h" + "${DIR}/internal/status_internal.cc" + "${DIR}/status.cc" + "${DIR}/status_payload_printer.h" + "${DIR}/status_payload_printer.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEFINES + "$<$<PLATFORM_ID:AIX>:_LINUX_SOURCE_COMPAT>" + DEPS + absl::atomic_hook + absl::config + absl::cord + absl::core_headers + absl::function_ref + absl::inlined_vector + absl::memory + absl::optional + absl::raw_logging_internal + absl::span + absl::stacktrace + absl::strerror + absl::str_format + absl::strings + absl::symbolize + PUBLIC +) + +absl_cc_library( + NAME + statusor + HDRS + "${DIR}/statusor.h" + SRCS + "${DIR}/statusor.cc" + "${DIR}/internal/statusor_internal.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::core_headers + absl::raw_logging_internal + absl::status + absl::strings + absl::type_traits + absl::utility + absl::variant + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/strings) + +absl_cc_library( + NAME + string_view + HDRS + "${DIR}/string_view.h" + SRCS + "${DIR}/string_view.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::core_headers + absl::throw_delegate + PUBLIC +) + +absl_cc_library( + NAME + strings + HDRS + "${DIR}/ascii.h" + "${DIR}/charconv.h" + "${DIR}/escaping.h" + "${DIR}/has_absl_stringify.h" + "${DIR}/internal/damerau_levenshtein_distance.h" + "${DIR}/internal/string_constant.h" + "${DIR}/match.h" + "${DIR}/numbers.h" + "${DIR}/str_cat.h" + "${DIR}/str_join.h" + "${DIR}/str_replace.h" + "${DIR}/str_split.h" + "${DIR}/strip.h" + "${DIR}/substitute.h" + SRCS + "${DIR}/ascii.cc" + "${DIR}/charconv.cc" + "${DIR}/escaping.cc" + "${DIR}/internal/charconv_bigint.cc" + "${DIR}/internal/charconv_bigint.h" + "${DIR}/internal/charconv_parse.cc" + "${DIR}/internal/charconv_parse.h" + "${DIR}/internal/damerau_levenshtein_distance.cc" + "${DIR}/internal/memutil.cc" + "${DIR}/internal/memutil.h" + "${DIR}/internal/stringify_sink.h" + "${DIR}/internal/stringify_sink.cc" + "${DIR}/internal/stl_type_traits.h" + "${DIR}/internal/str_join_internal.h" + "${DIR}/internal/str_split_internal.h" + "${DIR}/match.cc" + "${DIR}/numbers.cc" + "${DIR}/str_cat.cc" + "${DIR}/str_replace.cc" + "${DIR}/str_split.cc" + "${DIR}/substitute.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::string_view + absl::strings_internal + absl::base + absl::bits + absl::charset + absl::config + absl::core_headers + absl::endian + absl::int128 + absl::memory + absl::raw_logging_internal + absl::throw_delegate + absl::type_traits + PUBLIC +) + +absl_cc_library( + NAME + charset + HDRS + charset.h + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + absl::string_view + PUBLIC +) + +# Internal-only target, do not depend on directly.
+absl_cc_library( + NAME + strings_internal + HDRS + "${DIR}/internal/escaping.cc" + "${DIR}/internal/escaping.h" + "${DIR}/internal/ostringstream.h" + "${DIR}/internal/resize_uninitialized.h" + "${DIR}/internal/utf8.h" + SRCS + "${DIR}/internal/ostringstream.cc" + "${DIR}/internal/utf8.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + absl::endian + absl::raw_logging_internal + absl::type_traits +) + +absl_cc_library( + NAME + str_format + HDRS + "${DIR}/str_format.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::str_format_internal + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + str_format_internal + HDRS + "${DIR}/internal/str_format/arg.h" + "${DIR}/internal/str_format/bind.h" + "${DIR}/internal/str_format/checker.h" + "${DIR}/internal/str_format/constexpr_parser.h" + "${DIR}/internal/str_format/extension.h" + "${DIR}/internal/str_format/float_conversion.h" + "${DIR}/internal/str_format/output.h" + "${DIR}/internal/str_format/parser.h" + SRCS + "${DIR}/internal/str_format/arg.cc" + "${DIR}/internal/str_format/bind.cc" + "${DIR}/internal/str_format/extension.cc" + "${DIR}/internal/str_format/float_conversion.cc" + "${DIR}/internal/str_format/output.cc" + "${DIR}/internal/str_format/parser.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::bits + absl::strings + absl::config + absl::core_headers + absl::inlined_vector + absl::numeric_representation + absl::type_traits + absl::utility + absl::int128 + absl::span +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cord_internal + HDRS + "${DIR}/internal/cord_data_edge.h" + "${DIR}/internal/cord_internal.h" + "${DIR}/internal/cord_rep_btree.h" + "${DIR}/internal/cord_rep_btree_navigator.h" + "${DIR}/internal/cord_rep_btree_reader.h" + "${DIR}/internal/cord_rep_crc.h" + "${DIR}/internal/cord_rep_consume.h" + "${DIR}/internal/cord_rep_flat.h" + SRCS + "${DIR}/internal/cord_internal.cc" + "${DIR}/internal/cord_rep_btree.cc" + "${DIR}/internal/cord_rep_btree_navigator.cc" + "${DIR}/internal/cord_rep_btree_reader.cc" + "${DIR}/internal/cord_rep_crc.cc" + "${DIR}/internal/cord_rep_consume.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base_internal + absl::compressed_tuple + absl::config + absl::container_memory + absl::core_headers + absl::crc_cord_state + absl::endian + absl::inlined_vector + absl::layout + absl::raw_logging_internal + absl::strings + absl::throw_delegate + absl::type_traits +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cordz_update_tracker + HDRS + "${DIR}/internal/cordz_update_tracker.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cordz_functions + HDRS + "${DIR}/internal/cordz_functions.h" + SRCS + "${DIR}/internal/cordz_functions.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + absl::exponential_biased + absl::raw_logging_internal +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cordz_statistics + HDRS + "${DIR}/internal/cordz_statistics.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::core_headers + absl::cordz_update_tracker + absl::synchronization +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + cordz_handle + HDRS + "${DIR}/internal/cordz_handle.h" + SRCS + "${DIR}/internal/cordz_handle.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::raw_logging_internal + absl::synchronization +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cordz_info + HDRS + "${DIR}/internal/cordz_info.h" + SRCS + "${DIR}/internal/cordz_info.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::cord_internal + absl::cordz_functions + absl::cordz_handle + absl::cordz_statistics + absl::cordz_update_tracker + absl::core_headers + absl::inlined_vector + absl::span + absl::raw_logging_internal + absl::stacktrace + absl::synchronization + absl::time +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cordz_sample_token + HDRS + "${DIR}/internal/cordz_sample_token.h" + SRCS + "${DIR}/internal/cordz_sample_token.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::cordz_handle + absl::cordz_info +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + cordz_update_scope + HDRS + "${DIR}/internal/cordz_update_scope.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::cord_internal + absl::cordz_info + absl::cordz_update_tracker + absl::core_headers +) + +absl_cc_library( + NAME + cord + HDRS + "${DIR}/cord.h" + "${DIR}/cord_buffer.h" + SRCS + "${DIR}/cord.cc" + "${DIR}/cord_analysis.cc" + "${DIR}/cord_analysis.h" + "${DIR}/cord_buffer.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::cord_internal + absl::cordz_functions + absl::cordz_info + absl::cordz_update_scope + absl::cordz_update_tracker + absl::core_headers + absl::crc32c + absl::crc_cord_state + absl::endian + absl::function_ref + absl::inlined_vector + absl::optional + absl::raw_logging_internal + absl::span + absl::strings + absl::type_traits + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/synchronization) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + graphcycles_internal + HDRS + "${DIR}/internal/graphcycles.h" + SRCS + "${DIR}/internal/graphcycles.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::base_internal + absl::config + absl::core_headers + absl::malloc_internal + absl::raw_logging_internal +) + +# Internal-only target, do not depend on directly. 
+absl_cc_library( + NAME + kernel_timeout_internal + HDRS + "${DIR}/internal/kernel_timeout.h" + SRCS + "${DIR}/internal/kernel_timeout.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::config + absl::core_headers + absl::raw_logging_internal + absl::time +) + +absl_cc_library( + NAME + synchronization + HDRS + "${DIR}/barrier.h" + "${DIR}/blocking_counter.h" + "${DIR}/internal/create_thread_identity.h" + "${DIR}/internal/futex.h" + "${DIR}/internal/futex_waiter.h" + "${DIR}/internal/per_thread_sem.h" + "${DIR}/internal/pthread_waiter.h" + "${DIR}/internal/sem_waiter.h" + "${DIR}/internal/stdcpp_waiter.h" + "${DIR}/internal/waiter.h" + "${DIR}/internal/waiter_base.h" + "${DIR}/internal/win32_waiter.h" + "${DIR}/mutex.h" + "${DIR}/notification.h" + SRCS + "${DIR}/barrier.cc" + "${DIR}/blocking_counter.cc" + "${DIR}/internal/create_thread_identity.cc" + "${DIR}/internal/futex_waiter.cc" + "${DIR}/internal/per_thread_sem.cc" + "${DIR}/internal/pthread_waiter.cc" + "${DIR}/internal/sem_waiter.cc" + "${DIR}/internal/stdcpp_waiter.cc" + "${DIR}/internal/waiter_base.cc" + "${DIR}/internal/win32_waiter.cc" + "${DIR}/notification.cc" + "${DIR}/mutex.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::graphcycles_internal + absl::kernel_timeout_internal + absl::atomic_hook + absl::base + absl::base_internal + absl::config + absl::core_headers + absl::dynamic_annotations + absl::malloc_internal + absl::raw_logging_internal + absl::stacktrace + absl::symbolize + absl::time + Threads::Threads + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/time) + +absl_cc_library( + NAME + time + HDRS + "${DIR}/civil_time.h" + "${DIR}/clock.h" + "${DIR}/time.h" + SRCS + "${DIR}/civil_time.cc" + "${DIR}/clock.cc" + "${DIR}/duration.cc" + "${DIR}/format.cc" + "${DIR}/internal/get_current_time_chrono.inc" + "${DIR}/internal/get_current_time_posix.inc" + "${DIR}/time.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base + absl::civil_time + absl::core_headers + absl::int128 + absl::raw_logging_internal + absl::strings + absl::time_zone + PUBLIC +) + +absl_cc_library( + NAME + civil_time + HDRS + "${DIR}/internal/cctz/include/cctz/civil_time.h" + "${DIR}/internal/cctz/include/cctz/civil_time_detail.h" + SRCS + "${DIR}/internal/cctz/src/civil_time_detail.cc" + COPTS + ${ABSL_DEFAULT_COPTS} +) + +absl_cc_library( + NAME + time_zone + HDRS + "${DIR}/internal/cctz/include/cctz/time_zone.h" + "${DIR}/internal/cctz/include/cctz/zone_info_source.h" + SRCS + "${DIR}/internal/cctz/src/time_zone_fixed.cc" + "${DIR}/internal/cctz/src/time_zone_fixed.h" + "${DIR}/internal/cctz/src/time_zone_format.cc" + "${DIR}/internal/cctz/src/time_zone_if.cc" + "${DIR}/internal/cctz/src/time_zone_if.h" + "${DIR}/internal/cctz/src/time_zone_impl.cc" + "${DIR}/internal/cctz/src/time_zone_impl.h" + "${DIR}/internal/cctz/src/time_zone_info.cc" + "${DIR}/internal/cctz/src/time_zone_info.h" + "${DIR}/internal/cctz/src/time_zone_libc.cc" + "${DIR}/internal/cctz/src/time_zone_libc.h" + "${DIR}/internal/cctz/src/time_zone_lookup.cc" + "${DIR}/internal/cctz/src/time_zone_posix.cc" + "${DIR}/internal/cctz/src/time_zone_posix.h" + "${DIR}/internal/cctz/src/tzfile.h" + "${DIR}/internal/cctz/src/zone_info_source.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + Threads::Threads + $<$<PLATFORM_ID:Darwin>:-Wl,-framework,CoreFoundation> +) + +set(DIR ${ABSL_ROOT_DIR}/absl/types) + +absl_cc_library( + NAME + any + HDRS + "${DIR}/any.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::bad_any_cast + absl::config + absl::core_headers + absl::fast_type_id + absl::type_traits +
absl::utility + PUBLIC +) + +absl_cc_library( + NAME + bad_any_cast + HDRS + "${DIR}/bad_any_cast.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::bad_any_cast_impl + absl::config + PUBLIC +) + +# Internal-only target, do not depend on directly. +absl_cc_library( + NAME + bad_any_cast_impl + SRCS + "${DIR}/bad_any_cast.h" + "${DIR}/bad_any_cast.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::raw_logging_internal +) + +absl_cc_library( + NAME + span + HDRS + "${DIR}/span.h" + SRCS + "${DIR}/internal/span.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::algorithm + absl::core_headers + absl::throw_delegate + absl::type_traits + PUBLIC +) + +absl_cc_library( + NAME + optional + HDRS + "${DIR}/optional.h" + SRCS + "${DIR}/internal/optional.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::bad_optional_access + absl::base_internal + absl::config + absl::core_headers + absl::memory + absl::type_traits + absl::utility + PUBLIC +) + +absl_cc_library( + NAME + bad_optional_access + HDRS + "${DIR}/bad_optional_access.h" + SRCS + "${DIR}/bad_optional_access.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::raw_logging_internal + PUBLIC +) + +absl_cc_library( + NAME + bad_variant_access + HDRS + "${DIR}/bad_variant_access.h" + SRCS + "${DIR}/bad_variant_access.cc" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + absl::raw_logging_internal + PUBLIC +) + +absl_cc_library( + NAME + variant + HDRS + "${DIR}/variant.h" + SRCS + "${DIR}/internal/variant.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::bad_variant_access + absl::base_internal + absl::config + absl::core_headers + absl::type_traits + absl::utility + PUBLIC +) + +absl_cc_library( + NAME + compare + HDRS + "${DIR}/compare.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::core_headers + absl::type_traits + PUBLIC +) + +set(DIR ${ABSL_ROOT_DIR}/absl/utility) + +absl_cc_library( + NAME + utility + HDRS + "${DIR}/utility.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::base_internal + absl::config + absl::type_traits + PUBLIC +) + +absl_cc_library( + NAME + if_constexpr + HDRS + "${DIR}/internal/if_constexpr.h" + COPTS + ${ABSL_DEFAULT_COPTS} + DEPS + absl::config + PUBLIC +) -set(ABSL_PROPAGATE_CXX_STD ON) -add_subdirectory("${ABSL_ROOT_DIR}" "${ClickHouse_BINARY_DIR}/contrib/abseil-cpp") add_library(_abseil_swiss_tables INTERFACE) - -target_link_libraries(_abseil_swiss_tables INTERFACE - absl::flat_hash_map - absl::flat_hash_set -) - -get_target_property(FLAT_HASH_MAP_INCLUDE_DIR absl::flat_hash_map INTERFACE_INCLUDE_DIRECTORIES) -target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_MAP_INCLUDE_DIR}) - -get_target_property(FLAT_HASH_SET_INCLUDE_DIR absl::flat_hash_set INTERFACE_INCLUDE_DIRECTORIES) -target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_SET_INCLUDE_DIR}) - +target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${ABSL_ROOT_DIR}) add_library(ch_contrib::abseil_swiss_tables ALIAS _abseil_swiss_tables) - -set(ABSL_FORMAT_SRC - ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/arg.cc - ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/bind.cc - ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/extension.cc - ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/float_conversion.cc - ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/output.cc - ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/parser.cc -) - -add_library(_abseil_str_format ${ABSL_FORMAT_SRC}) -target_include_directories(_abseil_str_format PUBLIC 
${ABSL_ROOT_DIR}) - -add_library(ch_contrib::abseil_str_format ALIAS _abseil_str_format) diff --git a/contrib/arrow b/contrib/arrow index 1d93838f69a..ba5c67934e8 160000 --- a/contrib/arrow +++ b/contrib/arrow @@ -1 +1 @@ -Subproject commit 1d93838f69a802639ca144ea5704a98e2481810d +Subproject commit ba5c67934e8274d649befcffab56731632dc5253 diff --git a/contrib/arrow-cmake/CMakeLists.txt b/contrib/arrow-cmake/CMakeLists.txt index 02e809c560f..96d1f4adda7 100644 --- a/contrib/arrow-cmake/CMakeLists.txt +++ b/contrib/arrow-cmake/CMakeLists.txt @@ -77,16 +77,16 @@ set(FLATBUFFERS_SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/flatbuffers") set(FLATBUFFERS_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/flatbuffers") set(FLATBUFFERS_INCLUDE_DIR "${FLATBUFFERS_SRC_DIR}/include") -# set flatbuffers CMake options -set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library") -set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library") -set(FLATBUFFERS_BUILD_TESTS OFF CACHE BOOL "Skip flatbuffers tests") +set(FLATBUFFERS_SRCS + ${FLATBUFFERS_SRC_DIR}/src/idl_parser.cpp + ${FLATBUFFERS_SRC_DIR}/src/idl_gen_text.cpp + ${FLATBUFFERS_SRC_DIR}/src/reflection.cpp + ${FLATBUFFERS_SRC_DIR}/src/util.cpp) -add_subdirectory(${FLATBUFFERS_SRC_DIR} "${FLATBUFFERS_BINARY_DIR}") +add_library(_flatbuffers STATIC ${FLATBUFFERS_SRCS}) +target_include_directories(_flatbuffers PUBLIC ${FLATBUFFERS_INCLUDE_DIR}) +target_compile_definitions(_flatbuffers PRIVATE -DFLATBUFFERS_LOCALE_INDEPENDENT=0) -add_library(_flatbuffers INTERFACE) -target_link_libraries(_flatbuffers INTERFACE flatbuffers) -target_include_directories(_flatbuffers INTERFACE ${FLATBUFFERS_INCLUDE_DIR}) # === hdfs # NOTE: cannot use ch_contrib::hdfs since it's INCLUDE_DIRECTORIES does not includes trailing "hdfs/" @@ -109,7 +109,6 @@ set (ORC_CXX_HAS_CSTDINT 1) set (ORC_CXX_HAS_THREAD_LOCAL 1) include(orc_check.cmake) -configure_file("${ORC_INCLUDE_DIR}/orc/orc-config.hh.in" "${ORC_BUILD_INCLUDE_DIR}/orc/orc-config.hh") configure_file("${ORC_SOURCE_SRC_DIR}/Adaptor.hh.in" "${ORC_BUILD_INCLUDE_DIR}/Adaptor.hh") @@ -128,7 +127,6 @@ set(ORC_SRCS "${ORC_SOURCE_SRC_DIR}/BpackingDefault.hh" "${ORC_SOURCE_SRC_DIR}/ByteRLE.cc" "${ORC_SOURCE_SRC_DIR}/ByteRLE.hh" - "${ORC_SOURCE_SRC_DIR}/CMakeLists.txt" "${ORC_SOURCE_SRC_DIR}/ColumnPrinter.cc" "${ORC_SOURCE_SRC_DIR}/ColumnReader.cc" "${ORC_SOURCE_SRC_DIR}/ColumnReader.hh" @@ -198,7 +196,9 @@ target_link_libraries(_orc PRIVATE ch_contrib::snappy ch_contrib::zlib ch_contrib::zstd) -target_include_directories(_orc SYSTEM BEFORE PUBLIC ${ORC_INCLUDE_DIR}) +target_include_directories(_orc SYSTEM BEFORE PUBLIC + ${ORC_INCLUDE_DIR} + "${ClickHouse_SOURCE_DIR}/contrib/arrow-cmake/cpp/src/orc/c++/include") target_include_directories(_orc SYSTEM BEFORE PUBLIC ${ORC_BUILD_INCLUDE_DIR}) target_include_directories(_orc SYSTEM PRIVATE ${ORC_SOURCE_SRC_DIR} @@ -212,8 +212,6 @@ target_include_directories(_orc SYSTEM PRIVATE set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src/arrow") -configure_file("${LIBRARY_DIR}/util/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/cpp/src/arrow/util/config.h") - # arrow/cpp/src/arrow/CMakeLists.txt (ARROW_SRCS + ARROW_COMPUTE + ARROW_IPC) set(ARROW_SRCS "${LIBRARY_DIR}/array/array_base.cc" @@ -230,6 +228,8 @@ set(ARROW_SRCS "${LIBRARY_DIR}/array/builder_nested.cc" "${LIBRARY_DIR}/array/builder_primitive.cc" "${LIBRARY_DIR}/array/builder_union.cc" + "${LIBRARY_DIR}/array/builder_run_end.cc" + 
"${LIBRARY_DIR}/array/array_run_end.cc" "${LIBRARY_DIR}/array/concatenate.cc" "${LIBRARY_DIR}/array/data.cc" "${LIBRARY_DIR}/array/diff.cc" @@ -309,9 +309,12 @@ set(ARROW_SRCS "${LIBRARY_DIR}/util/debug.cc" "${LIBRARY_DIR}/util/tracing.cc" "${LIBRARY_DIR}/util/atfork_internal.cc" + "${LIBRARY_DIR}/util/crc32.cc" + "${LIBRARY_DIR}/util/hashing.cc" + "${LIBRARY_DIR}/util/ree_util.cc" + "${LIBRARY_DIR}/util/union_util.cc" "${LIBRARY_DIR}/vendored/base64.cpp" "${LIBRARY_DIR}/vendored/datetime/tz.cpp" - "${LIBRARY_DIR}/vendored/musl/strptime.c" "${LIBRARY_DIR}/vendored/uriparser/UriCommon.c" "${LIBRARY_DIR}/vendored/uriparser/UriCompare.c" @@ -328,39 +331,20 @@ set(ARROW_SRCS "${LIBRARY_DIR}/vendored/uriparser/UriRecompose.c" "${LIBRARY_DIR}/vendored/uriparser/UriResolve.c" "${LIBRARY_DIR}/vendored/uriparser/UriShorten.c" + "${LIBRARY_DIR}/vendored/double-conversion/bignum.cc" + "${LIBRARY_DIR}/vendored/double-conversion/bignum-dtoa.cc" + "${LIBRARY_DIR}/vendored/double-conversion/cached-powers.cc" + "${LIBRARY_DIR}/vendored/double-conversion/double-to-string.cc" + "${LIBRARY_DIR}/vendored/double-conversion/fast-dtoa.cc" + "${LIBRARY_DIR}/vendored/double-conversion/fixed-dtoa.cc" + "${LIBRARY_DIR}/vendored/double-conversion/string-to-double.cc" + "${LIBRARY_DIR}/vendored/double-conversion/strtod.cc" "${LIBRARY_DIR}/compute/api_aggregate.cc" "${LIBRARY_DIR}/compute/api_scalar.cc" "${LIBRARY_DIR}/compute/api_vector.cc" "${LIBRARY_DIR}/compute/cast.cc" "${LIBRARY_DIR}/compute/exec.cc" - "${LIBRARY_DIR}/compute/exec/accumulation_queue.cc" - "${LIBRARY_DIR}/compute/exec/accumulation_queue.h" - "${LIBRARY_DIR}/compute/exec/aggregate.cc" - "${LIBRARY_DIR}/compute/exec/aggregate_node.cc" - "${LIBRARY_DIR}/compute/exec/asof_join_node.cc" - "${LIBRARY_DIR}/compute/exec/bloom_filter.cc" - "${LIBRARY_DIR}/compute/exec/exec_plan.cc" - "${LIBRARY_DIR}/compute/exec/expression.cc" - "${LIBRARY_DIR}/compute/exec/filter_node.cc" - "${LIBRARY_DIR}/compute/exec/hash_join.cc" - "${LIBRARY_DIR}/compute/exec/hash_join_dict.cc" - "${LIBRARY_DIR}/compute/exec/hash_join_node.cc" - "${LIBRARY_DIR}/compute/exec/key_hash.cc" - "${LIBRARY_DIR}/compute/exec/key_map.cc" - "${LIBRARY_DIR}/compute/exec/map_node.cc" - "${LIBRARY_DIR}/compute/exec/options.cc" - "${LIBRARY_DIR}/compute/exec/order_by_impl.cc" - "${LIBRARY_DIR}/compute/exec/partition_util.cc" - "${LIBRARY_DIR}/compute/exec/project_node.cc" - "${LIBRARY_DIR}/compute/exec/query_context.cc" - "${LIBRARY_DIR}/compute/exec/sink_node.cc" - "${LIBRARY_DIR}/compute/exec/source_node.cc" - "${LIBRARY_DIR}/compute/exec/swiss_join.cc" - "${LIBRARY_DIR}/compute/exec/task_util.cc" - "${LIBRARY_DIR}/compute/exec/tpch_node.cc" - "${LIBRARY_DIR}/compute/exec/union_node.cc" - "${LIBRARY_DIR}/compute/exec/util.cc" "${LIBRARY_DIR}/compute/function.cc" "${LIBRARY_DIR}/compute/function_internal.cc" "${LIBRARY_DIR}/compute/kernel.cc" @@ -403,8 +387,13 @@ set(ARROW_SRCS "${LIBRARY_DIR}/compute/kernels/vector_select_k.cc" "${LIBRARY_DIR}/compute/kernels/vector_selection.cc" "${LIBRARY_DIR}/compute/kernels/vector_sort.cc" + "${LIBRARY_DIR}/compute/kernels/vector_selection_internal.cc" + "${LIBRARY_DIR}/compute/kernels/vector_selection_filter_internal.cc" + "${LIBRARY_DIR}/compute/kernels/vector_selection_take_internal.cc" "${LIBRARY_DIR}/compute/light_array.cc" "${LIBRARY_DIR}/compute/registry.cc" + "${LIBRARY_DIR}/compute/expression.cc" + "${LIBRARY_DIR}/compute/ordering.cc" "${LIBRARY_DIR}/compute/row/compare_internal.cc" "${LIBRARY_DIR}/compute/row/encode_internal.cc" 
"${LIBRARY_DIR}/compute/row/grouper.cc" @@ -459,7 +448,7 @@ target_link_libraries(_arrow PUBLIC _orc) add_dependencies(_arrow protoc) target_include_directories(_arrow SYSTEM BEFORE PUBLIC ${ARROW_SRC_DIR}) -target_include_directories(_arrow SYSTEM BEFORE PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/cpp/src") +target_include_directories(_arrow SYSTEM BEFORE PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow-cmake/cpp/src") target_include_directories(_arrow SYSTEM PRIVATE ${ARROW_SRC_DIR}) target_include_directories(_arrow SYSTEM PRIVATE ${HDFS_INCLUDE_DIR}) @@ -488,10 +477,10 @@ set(PARQUET_SRCS "${LIBRARY_DIR}/exception.cc" "${LIBRARY_DIR}/file_reader.cc" "${LIBRARY_DIR}/file_writer.cc" + "${LIBRARY_DIR}/page_index.cc" "${LIBRARY_DIR}/level_conversion.cc" "${LIBRARY_DIR}/level_comparison.cc" "${LIBRARY_DIR}/metadata.cc" - "${LIBRARY_DIR}/murmur3.cc" "${LIBRARY_DIR}/platform.cc" "${LIBRARY_DIR}/printer.cc" "${LIBRARY_DIR}/properties.cc" @@ -500,6 +489,8 @@ set(PARQUET_SRCS "${LIBRARY_DIR}/stream_reader.cc" "${LIBRARY_DIR}/stream_writer.cc" "${LIBRARY_DIR}/types.cc" + "${LIBRARY_DIR}/bloom_filter_reader.cc" + "${LIBRARY_DIR}/xxhasher.cc" "${GEN_LIBRARY_DIR}/parquet_constants.cpp" "${GEN_LIBRARY_DIR}/parquet_types.cpp" diff --git a/contrib/arrow-cmake/build/cmake/config.h.in b/contrib/arrow-cmake/build/cmake/config.h.in deleted file mode 120000 index eb28c214748..00000000000 --- a/contrib/arrow-cmake/build/cmake/config.h.in +++ /dev/null @@ -1 +0,0 @@ -../../../thrift/build/cmake/config.h.in \ No newline at end of file diff --git a/contrib/arrow-cmake/cpp/src/arrow/util/config.h b/contrib/arrow-cmake/cpp/src/arrow/util/config.h new file mode 100644 index 00000000000..cacff7b16cb --- /dev/null +++ b/contrib/arrow-cmake/cpp/src/arrow/util/config.h @@ -0,0 +1,61 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#define ARROW_VERSION_MAJOR 11 +#define ARROW_VERSION_MINOR 0 +#define ARROW_VERSION_PATCH 0 +#define ARROW_VERSION ((ARROW_VERSION_MAJOR * 1000) + ARROW_VERSION_MINOR) * 1000 + ARROW_VERSION_PATCH + +#define ARROW_VERSION_STRING "11.0.0" + +#define ARROW_SO_VERSION "1100" +#define ARROW_FULL_SO_VERSION "1100.0.0" + +#define ARROW_CXX_COMPILER_ID "Clang" +#define ARROW_CXX_COMPILER_VERSION "ClickHouse" +#define ARROW_CXX_COMPILER_FLAGS "" + +#define ARROW_BUILD_TYPE "" + +#define ARROW_GIT_ID "" +#define ARROW_GIT_DESCRIPTION "" + +#define ARROW_PACKAGE_KIND "" + +/* #undef ARROW_COMPUTE */ +/* #undef ARROW_CSV */ +/* #undef ARROW_CUDA */ +/* #undef ARROW_DATASET */ +/* #undef ARROW_FILESYSTEM */ +/* #undef ARROW_FLIGHT */ +/* #undef ARROW_FLIGHT_SQL */ +/* #undef ARROW_IPC */ +/* #undef ARROW_JEMALLOC */ +/* #undef ARROW_JEMALLOC_VENDORED */ +/* #undef ARROW_JSON */ +/* #undef ARROW_ORC */ +/* #undef ARROW_PARQUET */ +/* #undef ARROW_SUBSTRAIT */ + +/* #undef ARROW_GCS */ +/* #undef ARROW_S3 */ +/* #undef ARROW_USE_NATIVE_INT128 */ +/* #undef ARROW_WITH_MUSL */ +/* #undef ARROW_WITH_OPENTELEMETRY */ +/* #undef ARROW_WITH_UCX */ + +/* #undef GRPCPP_PP_INCLUDE */ diff --git a/contrib/arrow-cmake/cpp/src/orc/c++/include/orc/orc-config.hh b/contrib/arrow-cmake/cpp/src/orc/c++/include/orc/orc-config.hh new file mode 100644 index 00000000000..1b0f71ddd40 --- /dev/null +++ b/contrib/arrow-cmake/cpp/src/orc/c++/include/orc/orc-config.hh @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ORC_CONFIG_HH +#define ORC_CONFIG_HH + +#define ORC_VERSION "" + +#define ORC_CXX_HAS_CSTDINT + +#ifdef ORC_CXX_HAS_CSTDINT + #include <cstdint> +#else + #include <stdint.h> +#endif + +// The following macros should be kept for backward compatibility. +#define ORC_NOEXCEPT noexcept +#define ORC_NULLPTR nullptr +#define ORC_OVERRIDE override +#define ORC_UNIQUE_PTR std::unique_ptr + +#endif diff --git a/contrib/aws-cmake/AwsFeatureTests.cmake b/contrib/aws-cmake/AwsFeatureTests.cmake index 54727e08d6b..e58b6634f42 100644 --- a/contrib/aws-cmake/AwsFeatureTests.cmake +++ b/contrib/aws-cmake/AwsFeatureTests.cmake @@ -1,114 +1,13 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -include(CheckCSourceRuns) - option(USE_CPU_EXTENSIONS "Whenever possible, use functions optimized for CPUs with specific extensions (ex: SSE, AVX)." ON) -# In the current (11/2/21) state of mingw64, the packaged gcc is not capable of emitting properly aligned avx2 instructions under certain circumstances. -# This leads to crashes for windows builds using mingw64 when invoking the avx2-enabled versions of certain functions. Until we can find a better -# work-around, disable avx2 (and all other extensions) in mingw builds.
-# -# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412 -# -if (MINGW) - message(STATUS "MINGW detected! Disabling avx2 and other CPU extensions") - set(USE_CPU_EXTENSIONS OFF) -endif() +if (ARCH_AMD64) + set (AWS_ARCH_INTEL 1) +elseif (ARCH_AARCH64) + set (AWS_ARCH_ARM64 1) +endif () -if(NOT CMAKE_CROSSCOMPILING) - check_c_source_runs(" - #include <stdbool.h> - bool foo(int a, int b, int *c) { - return __builtin_mul_overflow(a, b, c); - } - - int main() { - int out; - if (foo(1, 2, &out)) { - return 0; - } - - return 0; - }" AWS_HAVE_GCC_OVERFLOW_MATH_EXTENSIONS) - - if (USE_CPU_EXTENSIONS) - check_c_source_runs(" - int main() { - int foo = 42; - _mulx_u32(1, 2, &foo); - return foo != 2; - }" AWS_HAVE_MSVC_MULX) - endif() - -endif() - -check_c_source_compiles(" - #include - #if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) - int main() { - return 0; - } - #else - it's not windows desktop - #endif -" AWS_HAVE_WINAPI_DESKTOP) - -check_c_source_compiles(" - int main() { -#if !(defined(__x86_64__) || defined(__i386__) || defined(_M_X64) || defined(_M_IX86)) -# error \"not intel\" -#endif - return 0; - } -" AWS_ARCH_INTEL) - -check_c_source_compiles(" - int main() { -#if !(defined(__aarch64__) || defined(_M_ARM64)) -# error \"not arm64\" -#endif - return 0; - } -" AWS_ARCH_ARM64) - -check_c_source_compiles(" - int main() { -#if !(defined(__arm__) || defined(_M_ARM)) -# error \"not arm\" -#endif - return 0; - } -" AWS_ARCH_ARM32) - -check_c_source_compiles(" -int main() { - int foo = 42, bar = 24; - __asm__ __volatile__(\"\":\"=r\"(foo):\"r\"(bar):\"memory\"); -}" AWS_HAVE_GCC_INLINE_ASM) - -check_c_source_compiles(" -#include <sys/auxv.h> -int main() { -#ifdef __linux__ - getauxval(AT_HWCAP); - getauxval(AT_HWCAP2); -#endif - return 0; -}" AWS_HAVE_AUXV) - -string(REGEX MATCH "^(aarch64|arm)" ARM_CPU "${CMAKE_SYSTEM_PROCESSOR}") -if(NOT LEGACY_COMPILER_SUPPORT OR ARM_CPU) - check_c_source_compiles(" - #include <execinfo.h> - int main() { - backtrace(NULL, 0); - return 0; - }" AWS_HAVE_EXECINFO) -endif() - -check_c_source_compiles(" -#include <linux/if_link.h> -int main() { - return 1; -}" AWS_HAVE_LINUX_IF_LINK_H) +set (AWS_HAVE_GCC_INLINE_ASM 1) +set (AWS_HAVE_AUXV 1) diff --git a/contrib/aws-cmake/AwsSIMD.cmake b/contrib/aws-cmake/AwsSIMD.cmake index bd6f4064e78..24f7628e86f 100644 --- a/contrib/aws-cmake/AwsSIMD.cmake +++ b/contrib/aws-cmake/AwsSIMD.cmake @@ -1,54 +1,13 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0.
-include(CheckCCompilerFlag) -include(CheckIncludeFile) - if (USE_CPU_EXTENSIONS) - if (MSVC) - check_c_compiler_flag("/arch:AVX2" HAVE_M_AVX2_FLAG) - if (HAVE_M_AVX2_FLAG) - set(AVX2_CFLAGS "/arch:AVX2") - endif() - else() - check_c_compiler_flag(-mavx2 HAVE_M_AVX2_FLAG) - if (HAVE_M_AVX2_FLAG) - set(AVX2_CFLAGS "-mavx -mavx2") - endif() + if (ENABLE_AVX2) + set (AVX2_CFLAGS "-mavx -mavx2") + set (HAVE_AVX2_INTRINSICS 1) + set (HAVE_MM256_EXTRACT_EPI64 1) endif() - - - cmake_push_check_state() - set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${AVX2_CFLAGS}") - - check_c_source_compiles(" - #include <immintrin.h> - #include <emmintrin.h> - #include <string.h> - - int main() { - __m256i vec; - memset(&vec, 0, sizeof(vec)); - - _mm256_shuffle_epi8(vec, vec); - _mm256_set_epi32(1,2,3,4,5,6,7,8); - _mm256_permutevar8x32_epi32(vec, vec); - - return 0; - }" HAVE_AVX2_INTRINSICS) - - check_c_source_compiles(" - #include <immintrin.h> - #include <string.h> - - int main() { - __m256i vec; - memset(&vec, 0, sizeof(vec)); - return (int)_mm256_extract_epi64(vec, 2); - }" HAVE_MM256_EXTRACT_EPI64) - - cmake_pop_check_state() -endif() # USE_CPU_EXTENSIONS +endif() macro(simd_add_definition_if target definition) if(${definition}) diff --git a/contrib/aws-cmake/AwsThreadAffinity.cmake b/contrib/aws-cmake/AwsThreadAffinity.cmake index 9e53481272c..7f30fb71b43 100644 --- a/contrib/aws-cmake/AwsThreadAffinity.cmake +++ b/contrib/aws-cmake/AwsThreadAffinity.cmake @@ -1,50 +1,9 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -include(CheckSymbolExists) - # Check if the platform supports setting thread affinity # (important for hitting full NIC entitlement on NUMA architectures) function(aws_set_thread_affinity_method target) - - # Non-POSIX, Android, and Apple platforms do not support thread affinity. - if (NOT UNIX OR ANDROID OR APPLE) - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE) - return() - endif() - - cmake_push_check_state() - list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE) - list(APPEND CMAKE_REQUIRED_LIBRARIES pthread) - - set(headers "pthread.h") - # BSDs put nonportable pthread declarations in a separate header. - if(CMAKE_SYSTEM_NAME MATCHES BSD) - set(headers "${headers};pthread_np.h") - endif() - - # Using pthread attrs is the preferred method, but is glibc-specific. - check_symbol_exists(pthread_attr_setaffinity_np "${headers}" USE_PTHREAD_ATTR_SETAFFINITY) - if (USE_PTHREAD_ATTR_SETAFFINITY) - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD_ATTR) - return() - endif() - - # This method is still nonportable, but is supported by musl and BSDs. - check_symbol_exists(pthread_setaffinity_np "${headers}" USE_PTHREAD_SETAFFINITY) - if (USE_PTHREAD_SETAFFINITY) - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD) - return() - endif() - - # If we got here, we expected thread affinity support but didn't find it. - # We still build with degraded NUMA performance, but show a warning. - message(WARNING "No supported method for setting thread affinity") - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE) - - cmake_pop_check_state() + # This code has been cut, because I don't care about it.
+ target_compile_definitions(${target} PRIVATE -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE) endfunction() diff --git a/contrib/aws-cmake/AwsThreadName.cmake b/contrib/aws-cmake/AwsThreadName.cmake index a67416b4f83..e17759435ed 100644 --- a/contrib/aws-cmake/AwsThreadName.cmake +++ b/contrib/aws-cmake/AwsThreadName.cmake @@ -1,61 +1,13 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -include(CheckSymbolExists) - # Check how the platform supports setting thread name function(aws_set_thread_name_method target) - - if (WINDOWS) - # On Windows we do a runtime check, instead of compile-time check - return() - elseif (APPLE) + if (APPLE) # All Apple platforms we support have the same function, so no need for compile-time check. return() endif() - cmake_push_check_state() - list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE) - list(APPEND CMAKE_REQUIRED_LIBRARIES pthread) - - # The start of the test program - set(c_source_start " - #define _GNU_SOURCE - #include - - #if defined(__FreeBSD__) || defined(__NETBSD__) - #include - #endif - - int main() { - pthread_t thread_id; - ") - - # The end of the test program - set(c_source_end "}") - # pthread_setname_np() usually takes 2 args - check_c_source_compiles(" - ${c_source_start} - pthread_setname_np(thread_id, \"asdf\"); - ${c_source_end}" - PTHREAD_SETNAME_TAKES_2ARGS) - if (PTHREAD_SETNAME_TAKES_2ARGS) - target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS) - return() - endif() - - # But on NetBSD it takes 3! - check_c_source_compiles(" - ${c_source_start} - pthread_setname_np(thread_id, \"asdf\", NULL); - ${c_source_end} - " PTHREAD_SETNAME_TAKES_3ARGS) - if (PTHREAD_SETNAME_TAKES_3ARGS) - target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_3ARGS) - return() - endif() - - # And on many older/weirder platforms it's just not supported - cmake_pop_check_state() + target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS) endfunction() diff --git a/contrib/azure-cmake/CMakeLists.txt b/contrib/azure-cmake/CMakeLists.txt index 7aba81259d3..bb44c993e79 100644 --- a/contrib/azure-cmake/CMakeLists.txt +++ b/contrib/azure-cmake/CMakeLists.txt @@ -48,9 +48,8 @@ set(AZURE_SDK_INCLUDES "${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-blobs/inc/" ) -include("${AZURE_DIR}/cmake-modules/AzureTransportAdapters.cmake") - add_library(_azure_sdk ${AZURE_SDK_UNIFIED_SRC}) +target_compile_definitions(_azure_sdk PRIVATE BUILD_CURL_HTTP_TRANSPORT_ADAPTER) # Originally, on Windows azure-core is built with bcrypt and crypt32 by default if (TARGET OpenSSL::SSL) diff --git a/contrib/cassandra-cmake/CMakeLists.txt b/contrib/cassandra-cmake/CMakeLists.txt index 32611e0e151..0082364c130 100644 --- a/contrib/cassandra-cmake/CMakeLists.txt +++ b/contrib/cassandra-cmake/CMakeLists.txt @@ -68,8 +68,7 @@ list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/third_party/hdr_histogram ${CASS_SRC_DIR}/third_party/http-parser ${CASS_SRC_DIR}/third_party/mt19937_64 - ${CASS_SRC_DIR}/third_party/rapidjson/rapidjson - ${CASS_SRC_DIR}/third_party/sparsehash/src) + ${CASS_SRC_DIR}/third_party/rapidjson/rapidjson) list(APPEND INCLUDE_DIRS ${CASS_INCLUDE_DIR} ${CASS_SRC_DIR}) @@ -83,10 +82,6 @@ set(HAVE_MEMCPY 1) set(HAVE_LONG_LONG 1) set(HAVE_UINT16_T 1) -configure_file("${CASS_SRC_DIR}/third_party/sparsehash/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/sparsehash/internal/sparseconfig.h") - - - # Determine random availability if (OS_LINUX) #set (HAVE_GETRANDOM 1) - not 
on every Linux kernel @@ -116,17 +111,17 @@ configure_file( ${CASS_ROOT_DIR}/driver_config.hpp.in ${CMAKE_CURRENT_BINARY_DIR}/driver_config.hpp) - add_library(_cassandra ${SOURCES} $ $ $) -target_link_libraries(_cassandra ch_contrib::zlib ch_contrib::minizip) +target_link_libraries(_cassandra ch_contrib::zlib ch_contrib::minizip ch_contrib::sparsehash) target_include_directories(_cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS}) target_include_directories(_cassandra SYSTEM BEFORE PUBLIC ${CASS_INCLUDE_DIR}) target_compile_definitions(_cassandra PRIVATE CASS_BUILDING) +target_compile_definitions(_cassandra PRIVATE -DSPARSEHASH_HASH=std::hash -Dsparsehash=google) target_link_libraries(_cassandra ch_contrib::uv) diff --git a/contrib/fastops-cmake/CMakeLists.txt b/contrib/fastops-cmake/CMakeLists.txt index e9aa4803583..1b09b736b2a 100644 --- a/contrib/fastops-cmake/CMakeLists.txt +++ b/contrib/fastops-cmake/CMakeLists.txt @@ -13,12 +13,10 @@ set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/fastops") set(SRCS "") -if(HAVE_AVX) +if(ARCH_AMD64) set (SRCS ${SRCS} "${LIBRARY_DIR}/fastops/avx/ops_avx.cpp") set_source_files_properties("${LIBRARY_DIR}/fastops/avx/ops_avx.cpp" PROPERTIES COMPILE_FLAGS "-mavx -DNO_AVX2") -endif() -if(HAVE_AVX2) set (SRCS ${SRCS} "${LIBRARY_DIR}/fastops/avx2/ops_avx2.cpp") set_source_files_properties("${LIBRARY_DIR}/fastops/avx2/ops_avx2.cpp" PROPERTIES COMPILE_FLAGS "-mavx2 -mfma") endif() diff --git a/contrib/google-protobuf b/contrib/google-protobuf index 2a4fa1a4e95..0862007f6ca 160000 --- a/contrib/google-protobuf +++ b/contrib/google-protobuf @@ -1 +1 @@ -Subproject commit 2a4fa1a4e95012d754ac55d43c8bc462dd1c78a8 +Subproject commit 0862007f6ca1f5723c58f10f0ca34f3f25a63b2e diff --git a/contrib/google-protobuf-cmake/CMakeLists.txt b/contrib/google-protobuf-cmake/CMakeLists.txt index 268f0fbe0e4..dda6dfe85e4 100644 --- a/contrib/google-protobuf-cmake/CMakeLists.txt +++ b/contrib/google-protobuf-cmake/CMakeLists.txt @@ -20,7 +20,6 @@ endif() set(protobuf_source_dir "${ClickHouse_SOURCE_DIR}/contrib/google-protobuf") set(protobuf_binary_dir "${ClickHouse_BINARY_DIR}/contrib/google-protobuf") - add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD) add_definitions(-DHAVE_PTHREAD) @@ -30,17 +29,69 @@ include_directories( ${protobuf_binary_dir} ${protobuf_source_dir}/src) +add_library(utf8_range + ${protobuf_source_dir}/third_party/utf8_range/naive.c + ${protobuf_source_dir}/third_party/utf8_range/range2-neon.c + ${protobuf_source_dir}/third_party/utf8_range/range2-sse.c +) +include_directories(${protobuf_source_dir}/third_party/utf8_range) + +add_library(utf8_validity + ${protobuf_source_dir}/third_party/utf8_range/utf8_validity.cc +) +target_link_libraries(utf8_validity PUBLIC absl::strings) + +set(protobuf_absl_used_targets + absl::absl_check + absl::absl_log + absl::algorithm + absl::base + absl::bind_front + absl::bits + absl::btree + absl::cleanup + absl::cord + absl::core_headers + absl::debugging + absl::die_if_null + absl::dynamic_annotations + absl::flags + absl::flat_hash_map + absl::flat_hash_set + absl::function_ref + absl::hash + absl::layout + absl::log_initialize + absl::log_severity + absl::memory + absl::node_hash_map + absl::node_hash_set + absl::optional + absl::span + absl::status + absl::statusor + absl::strings + absl::synchronization + absl::time + absl::type_traits + absl::utility + absl::variant +) + set(libprotobuf_lite_files ${protobuf_source_dir}/src/google/protobuf/any_lite.cc ${protobuf_source_dir}/src/google/protobuf/arena.cc + 
${protobuf_source_dir}/src/google/protobuf/arena_align.cc ${protobuf_source_dir}/src/google/protobuf/arenastring.cc + ${protobuf_source_dir}/src/google/protobuf/arenaz_sampler.cc ${protobuf_source_dir}/src/google/protobuf/extension_set.cc ${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_lite.cc ${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc ${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc + ${protobuf_source_dir}/src/google/protobuf/inlined_string_field.cc ${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc ${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc - ${protobuf_source_dir}/src/google/protobuf/io/strtod.cc ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc @@ -48,21 +99,15 @@ set(libprotobuf_lite_files ${protobuf_source_dir}/src/google/protobuf/message_lite.cc ${protobuf_source_dir}/src/google/protobuf/parse_context.cc ${protobuf_source_dir}/src/google/protobuf/repeated_field.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/bytestream.cc + ${protobuf_source_dir}/src/google/protobuf/repeated_ptr_field.cc ${protobuf_source_dir}/src/google/protobuf/stubs/common.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/int128.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/status.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/statusor.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/strutil.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/time.cc ${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc ) add_library(_libprotobuf-lite ${libprotobuf_lite_files}) -target_link_libraries(_libprotobuf-lite pthread) +target_link_libraries(_libprotobuf-lite + pthread + utf8_validity) if(${CMAKE_SYSTEM_NAME} STREQUAL "Android") target_link_libraries(_libprotobuf-lite log) endif() @@ -71,67 +116,93 @@ add_library(protobuf::libprotobuf-lite ALIAS _libprotobuf-lite) set(libprotobuf_files - ${protobuf_source_dir}/src/google/protobuf/any.cc ${protobuf_source_dir}/src/google/protobuf/any.pb.cc ${protobuf_source_dir}/src/google/protobuf/api.pb.cc + ${protobuf_source_dir}/src/google/protobuf/duration.pb.cc + ${protobuf_source_dir}/src/google/protobuf/empty.pb.cc + ${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc + ${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc + ${protobuf_source_dir}/src/google/protobuf/struct.pb.cc + ${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc + ${protobuf_source_dir}/src/google/protobuf/type.pb.cc + ${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc + ${protobuf_source_dir}/src/google/protobuf/any.cc + ${protobuf_source_dir}/src/google/protobuf/any_lite.cc + ${protobuf_source_dir}/src/google/protobuf/arena.cc + ${protobuf_source_dir}/src/google/protobuf/arena_align.cc + ${protobuf_source_dir}/src/google/protobuf/arenastring.cc + ${protobuf_source_dir}/src/google/protobuf/arenaz_sampler.cc ${protobuf_source_dir}/src/google/protobuf/compiler/importer.cc ${protobuf_source_dir}/src/google/protobuf/compiler/parser.cc + ${protobuf_source_dir}/src/google/protobuf/cpp_features.pb.cc 
${protobuf_source_dir}/src/google/protobuf/descriptor.cc ${protobuf_source_dir}/src/google/protobuf/descriptor.pb.cc ${protobuf_source_dir}/src/google/protobuf/descriptor_database.cc - ${protobuf_source_dir}/src/google/protobuf/duration.pb.cc ${protobuf_source_dir}/src/google/protobuf/dynamic_message.cc - ${protobuf_source_dir}/src/google/protobuf/empty.pb.cc + ${protobuf_source_dir}/src/google/protobuf/extension_set.cc ${protobuf_source_dir}/src/google/protobuf/extension_set_heavy.cc - ${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc + ${protobuf_source_dir}/src/google/protobuf/feature_resolver.cc + ${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc ${protobuf_source_dir}/src/google/protobuf/generated_message_bases.cc ${protobuf_source_dir}/src/google/protobuf/generated_message_reflection.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_full.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_gen.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_tctable_lite.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc + ${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc + ${protobuf_source_dir}/src/google/protobuf/inlined_string_field.cc + ${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc ${protobuf_source_dir}/src/google/protobuf/io/gzip_stream.cc + ${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc ${protobuf_source_dir}/src/google/protobuf/io/printer.cc + ${protobuf_source_dir}/src/google/protobuf/io/strtod.cc ${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_sink.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc + ${protobuf_source_dir}/src/google/protobuf/json/internal/lexer.cc + ${protobuf_source_dir}/src/google/protobuf/json/internal/message_path.cc + ${protobuf_source_dir}/src/google/protobuf/json/internal/parser.cc + ${protobuf_source_dir}/src/google/protobuf/json/internal/unparser.cc + ${protobuf_source_dir}/src/google/protobuf/json/internal/untyped_message.cc + ${protobuf_source_dir}/src/google/protobuf/json/internal/writer.cc + ${protobuf_source_dir}/src/google/protobuf/json/internal/zero_copy_buffered_stream.cc + ${protobuf_source_dir}/src/google/protobuf/json/json.cc + ${protobuf_source_dir}/src/google/protobuf/map.cc ${protobuf_source_dir}/src/google/protobuf/map_field.cc ${protobuf_source_dir}/src/google/protobuf/message.cc + ${protobuf_source_dir}/src/google/protobuf/message_lite.cc + ${protobuf_source_dir}/src/google/protobuf/parse_context.cc + ${protobuf_source_dir}/src/google/protobuf/port.cc + ${protobuf_source_dir}/src/google/protobuf/raw_ptr.cc + ${protobuf_source_dir}/src/google/protobuf/reflection_mode.cc ${protobuf_source_dir}/src/google/protobuf/reflection_ops.cc + ${protobuf_source_dir}/src/google/protobuf/repeated_field.cc ${protobuf_source_dir}/src/google/protobuf/repeated_ptr_field.cc ${protobuf_source_dir}/src/google/protobuf/service.cc - ${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc - ${protobuf_source_dir}/src/google/protobuf/struct.pb.cc - ${protobuf_source_dir}/src/google/protobuf/stubs/substitute.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/common.cc ${protobuf_source_dir}/src/google/protobuf/text_format.cc - 
${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc - ${protobuf_source_dir}/src/google/protobuf/type.pb.cc ${protobuf_source_dir}/src/google/protobuf/unknown_field_set.cc ${protobuf_source_dir}/src/google/protobuf/util/delimited_message_util.cc ${protobuf_source_dir}/src/google/protobuf/util/field_comparator.cc ${protobuf_source_dir}/src/google/protobuf/util/field_mask_util.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/datapiece.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/error_listener.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/field_mask_utility.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/json_escaping.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc - ${protobuf_source_dir}/src/google/protobuf/util/internal/utility.cc - ${protobuf_source_dir}/src/google/protobuf/util/json_util.cc ${protobuf_source_dir}/src/google/protobuf/util/message_differencer.cc ${protobuf_source_dir}/src/google/protobuf/util/time_util.cc ${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util.cc ${protobuf_source_dir}/src/google/protobuf/wire_format.cc - ${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc + ${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc ) add_library(_libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files}) if (ENABLE_FUZZING) target_compile_options(_libprotobuf PRIVATE "-fsanitize-recover=all") endif() -target_link_libraries(_libprotobuf pthread) -target_link_libraries(_libprotobuf ch_contrib::zlib) +target_link_libraries(_libprotobuf + pthread + ch_contrib::zlib + utf8_validity + ${protobuf_absl_used_targets}) if(${CMAKE_SYSTEM_NAME} STREQUAL "Android") target_link_libraries(_libprotobuf log) endif() @@ -140,23 +211,26 @@ add_library(protobuf::libprotobuf ALIAS _libprotobuf) set(libprotoc_files + ${protobuf_source_dir}/src/google/protobuf/compiler/allowlists/editions.cc ${protobuf_source_dir}/src/google/protobuf/compiler/code_generator.cc ${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/enum.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/enum_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/extension.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/cord_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/map_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/field_generators/string_field.cc 
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/file.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/generator.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/helpers.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/map_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/message.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/message_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/padding_optimizer.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/parse_function_generator.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/primitive_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/service.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/string_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/tracker.cc ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum.cc ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum_field.cc @@ -173,6 +247,7 @@ set(libprotoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/names.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/context.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/doc_comment.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/enum.cc @@ -195,38 +270,55 @@ set(libprotoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/java/message_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/message_field_lite.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/message_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/message_serialization.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/name_resolver.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/names.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/primitive_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/primitive_field_lite.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/service.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/shared_code_generator.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/string_field.cc ${protobuf_source_dir}/src/google/protobuf/compiler/java/string_field_lite.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_field.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_file.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_generator.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_map_field.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message_field.cc - 
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_oneof.cc - ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/enum.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/extension.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/file.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/helpers.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/import_writer.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/line_consumer.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/map_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/message.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/names.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/oneof.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/text_format_decode_data.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/php/names.cc ${protobuf_source_dir}/src/google/protobuf/compiler/php/php_generator.cc ${protobuf_source_dir}/src/google/protobuf/compiler/plugin.cc ${protobuf_source_dir}/src/google/protobuf/compiler/plugin.pb.cc ${protobuf_source_dir}/src/google/protobuf/compiler/python/generator.cc ${protobuf_source_dir}/src/google/protobuf/compiler/python/helpers.cc ${protobuf_source_dir}/src/google/protobuf/compiler/python/pyi_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/retention.cc ${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/rust/accessors/accessors.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/rust/accessors/singular_bytes.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/rust/accessors/singular_scalar.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/rust/context.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/rust/generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/rust/message.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/rust/naming.cc ${protobuf_source_dir}/src/google/protobuf/compiler/subprocess.cc ${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc ) add_library(_libprotoc ${libprotoc_files}) -target_link_libraries(_libprotoc _libprotobuf) +target_link_libraries(_libprotoc + _libprotobuf + ${protobuf_absl_used_targets}) add_library(protobuf::libprotoc ALIAS _libprotoc) set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc) @@ -235,7 +327,11 @@ if (CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR) add_executable(protoc ${protoc_files}) - target_link_libraries(protoc _libprotoc _libprotobuf pthread) + target_link_libraries(protoc _libprotoc + _libprotobuf + pthread + utf8_validity + ${protobuf_absl_used_targets}) add_executable(protobuf::protoc ALIAS protoc) if (ENABLE_FUZZING) @@ -255,6 +351,8 @@ else () # This is quite ugly but I cannot make dependencies work propery. 
+ set(abseil_source_dir "${ClickHouse_SOURCE_DIR}/contrib/abseil-cpp") + execute_process( COMMAND mkdir -p ${PROTOC_BUILD_DIR} COMMAND_ECHO STDOUT) @@ -269,7 +367,9 @@ else () "-Dprotobuf_BUILD_CONFORMANCE=0" "-Dprotobuf_BUILD_EXAMPLES=0" "-Dprotobuf_BUILD_PROTOC_BINARIES=1" - "${protobuf_source_dir}/cmake" + "-DABSL_ROOT_DIR=${abseil_source_dir}" + "-DABSL_ENABLE_INSTALL=0" + "${protobuf_source_dir}" WORKING_DIRECTORY "${PROTOC_BUILD_DIR}" COMMAND_ECHO STDOUT) @@ -278,38 +378,6 @@ else () COMMAND_ECHO STDOUT) endif () -# add_custom_command ( -# OUTPUT ${PROTOC_BUILD_DIR} -# COMMAND mkdir -p ${PROTOC_BUILD_DIR}) -# -# add_custom_command ( -# OUTPUT "${PROTOC_BUILD_DIR}/CMakeCache.txt" -# -# COMMAND ${CMAKE_COMMAND} -# -G"${CMAKE_GENERATOR}" -# -DCMAKE_MAKE_PROGRAM="${CMAKE_MAKE_PROGRAM}" -# -DCMAKE_C_COMPILER="${CMAKE_C_COMPILER}" -# -DCMAKE_CXX_COMPILER="${CMAKE_CXX_COMPILER}" -# -Dprotobuf_BUILD_TESTS=0 -# -Dprotobuf_BUILD_CONFORMANCE=0 -# -Dprotobuf_BUILD_EXAMPLES=0 -# -Dprotobuf_BUILD_PROTOC_BINARIES=1 -# "${protobuf_source_dir}/cmake" -# -# DEPENDS "${PROTOC_BUILD_DIR}" -# WORKING_DIRECTORY "${PROTOC_BUILD_DIR}" -# COMMENT "Configuring 'protoc' for host architecture." -# USES_TERMINAL) -# -# add_custom_command ( -# OUTPUT "${PROTOC_BUILD_DIR}/protoc" -# COMMAND ${CMAKE_COMMAND} --build "${PROTOC_BUILD_DIR}" -# DEPENDS "${PROTOC_BUILD_DIR}/CMakeCache.txt" -# COMMENT "Building 'protoc' for host architecture." -# USES_TERMINAL) -# -# add_custom_target (protoc-host DEPENDS "${PROTOC_BUILD_DIR}/protoc") - add_executable(protoc IMPORTED GLOBAL) set_target_properties (protoc PROPERTIES IMPORTED_LOCATION "${PROTOC_BUILD_DIR}/protoc") add_dependencies(protoc "${PROTOC_BUILD_DIR}/protoc") @@ -317,9 +385,25 @@ endif () include("${ClickHouse_SOURCE_DIR}/contrib/google-protobuf-cmake/protobuf_generate.cmake") +# These files needs to be installed to make it possible that users can use well-known protobuf types +set(google_proto_files + ${protobuf_source_dir}/src/google/protobuf/any.proto + ${protobuf_source_dir}/src/google/protobuf/api.proto + ${protobuf_source_dir}/src/google/protobuf/descriptor.proto + ${protobuf_source_dir}/src/google/protobuf/duration.proto + ${protobuf_source_dir}/src/google/protobuf/empty.proto + ${protobuf_source_dir}/src/google/protobuf/field_mask.proto + ${protobuf_source_dir}/src/google/protobuf/source_context.proto + ${protobuf_source_dir}/src/google/protobuf/struct.proto + ${protobuf_source_dir}/src/google/protobuf/timestamp.proto + ${protobuf_source_dir}/src/google/protobuf/type.proto + ${protobuf_source_dir}/src/google/protobuf/wrappers.proto +) + add_library(_protobuf INTERFACE) target_link_libraries(_protobuf INTERFACE _libprotobuf) target_include_directories(_protobuf INTERFACE "${Protobuf_INCLUDE_DIR}") +set_target_properties(_protobuf PROPERTIES google_proto_files "${google_proto_files}") add_library(ch_contrib::protobuf ALIAS _protobuf) add_library(_protoc INTERFACE) diff --git a/contrib/grpc b/contrib/grpc index bef8212d1e0..77b2737a709 160000 --- a/contrib/grpc +++ b/contrib/grpc @@ -1 +1 @@ -Subproject commit bef8212d1e01f99e406c282ceab3d42da08e09ce +Subproject commit 77b2737a709d43d8c6895e3f03ca62b00bd9201c diff --git a/contrib/grpc-cmake/CMakeLists.txt b/contrib/grpc-cmake/CMakeLists.txt index 09ed2fe3f80..b8b5f5580c4 100644 --- a/contrib/grpc-cmake/CMakeLists.txt +++ b/contrib/grpc-cmake/CMakeLists.txt @@ -9,50 +9,14 @@ endif() set(_gRPC_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/grpc") set(_gRPC_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/grpc") -# Use 
re2 from ClickHouse contrib, not from gRPC third_party. -set(gRPC_RE2_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_RE2_INCLUDE_DIR "") -set(_gRPC_RE2_LIBRARIES ch_contrib::re2) - -# Use zlib from ClickHouse contrib, not from gRPC third_party. -set(gRPC_ZLIB_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_ZLIB_INCLUDE_DIR "") -set(_gRPC_ZLIB_LIBRARIES ch_contrib::zlib) - -# Use protobuf from ClickHouse contrib, not from gRPC third_party. -set(gRPC_PROTOBUF_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_PROTOBUF_LIBRARIES ch_contrib::protobuf) -set(_gRPC_PROTOBUF_PROTOC "protoc") -set(_gRPC_PROTOBUF_PROTOC_EXECUTABLE $<TARGET_FILE:protoc>) -set(_gRPC_PROTOBUF_PROTOC_LIBRARIES ch_contrib::protoc) - if(TARGET OpenSSL::SSL) set(gRPC_USE_UNSECURE_LIBRARIES FALSE) else() set(gRPC_USE_UNSECURE_LIBRARIES TRUE) endif() -# Use OpenSSL from ClickHouse contrib, not from gRPC third_party. -set(gRPC_SSL_PROVIDER "clickhouse" CACHE STRING "" FORCE) -set(_gRPC_SSL_INCLUDE_DIR "") -set(_gRPC_SSL_LIBRARIES OpenSSL::Crypto OpenSSL::SSL) - -# Use abseil-cpp from ClickHouse contrib, not from gRPC third_party. -set(gRPC_ABSL_PROVIDER "clickhouse" CACHE STRING "" FORCE) - -# We don't want to build C# extensions. -set(gRPC_BUILD_CSHARP_EXT OFF) - -# TODO: Remove this. We generally like to compile with C++23 but grpc isn't ready yet. -set (CMAKE_CXX_STANDARD 20) - -set(_gRPC_CARES_LIBRARIES ch_contrib::c-ares) -set(gRPC_CARES_PROVIDER "clickhouse" CACHE STRING "" FORCE) -add_subdirectory("${_gRPC_SOURCE_DIR}" "${_gRPC_BINARY_DIR}") - -# The contrib/grpc/CMakeLists.txt redefined the PROTOBUF_GENERATE_GRPC_CPP() function for its own purposes, -# so we need to redefine it back. +include(grpc.cmake) +include(protobuf_generate_grpc.cmake) set(gRPC_CPP_PLUGIN $<TARGET_FILE:grpc_cpp_plugin>) set(gRPC_PYTHON_PLUGIN $<TARGET_FILE:grpc_python_plugin>) diff --git a/contrib/grpc-cmake/grpc.cmake b/contrib/grpc-cmake/grpc.cmake new file mode 100644 index 00000000000..c2488539211 --- /dev/null +++ b/contrib/grpc-cmake/grpc.cmake @@ -0,0 +1,1854 @@ +# This file was edited for ClickHouse. + +# GRPC global cmake file +# This currently builds C and C++ code. +# This file has been automatically generated from a template file. +# Please look at the templates directory instead. +# This file can be regenerated from the template by running +# tools/buildgen/generate_projects.sh +# +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +# We want to use C++23, but GRPC is not ready +set (CMAKE_CXX_STANDARD 20) + +set(_gRPC_ZLIB_INCLUDE_DIR "") +set(_gRPC_ZLIB_LIBRARIES ch_contrib::zlib) + +set(_gRPC_CARES_LIBRARIES ch_contrib::c-ares) + +set(_gRPC_RE2_INCLUDE_DIR "") +set(_gRPC_RE2_LIBRARIES ch_contrib::re2) + +set(_gRPC_SSL_INCLUDE_DIR "") +set(_gRPC_SSL_LIBRARIES OpenSSL::Crypto OpenSSL::SSL) + +set(_gRPC_PROTOBUF_LIBRARIES ch_contrib::protobuf) +set(_gRPC_PROTOBUF_PROTOC "protoc") +set(_gRPC_PROTOBUF_PROTOC_EXECUTABLE $) +set(_gRPC_PROTOBUF_PROTOC_LIBRARIES ch_contrib::protoc) + + +if(UNIX) + if(${CMAKE_SYSTEM_NAME} MATCHES "Linux") + set(_gRPC_PLATFORM_LINUX ON) + elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + set(_gRPC_PLATFORM_MAC ON) + elseif(${CMAKE_SYSTEM_NAME} MATCHES "iOS") + set(_gRPC_PLATFORM_IOS ON) + elseif(${CMAKE_SYSTEM_NAME} MATCHES "Android") + set(_gRPC_PLATFORM_ANDROID ON) + else() + set(_gRPC_PLATFORM_POSIX ON) + endif() +endif() + +set(_gRPC_ADDRESS_SORTING_INCLUDE_DIR "${_gRPC_SOURCE_DIR}/third_party/address_sorting/include") +set(_gRPC_ADDRESS_SORTING_LIBRARIES address_sorting) + +set(UPB_ROOT_DIR ${_gRPC_SOURCE_DIR}/third_party/upb) + +set(_gRPC_UPB_INCLUDE_DIR "${UPB_ROOT_DIR}" "${_gRPC_SOURCE_DIR}/third_party/utf8_range") +set(_gRPC_UPB_GRPC_GENERATED_DIR "${_gRPC_SOURCE_DIR}/src//core/ext/upb-generated" "${_gRPC_SOURCE_DIR}/src//core/ext/upbdefs-generated") + +set(_gRPC_UPB_LIBRARIES upb) + +set(_gRPC_XXHASH_INCLUDE_DIR "${_gRPC_SOURCE_DIR}/third_party/xxhash") + +add_library(address_sorting + ${_gRPC_SOURCE_DIR}/third_party/address_sorting/address_sorting.c + ${_gRPC_SOURCE_DIR}/third_party/address_sorting/address_sorting_posix.c + ${_gRPC_SOURCE_DIR}/third_party/address_sorting/address_sorting_windows.c +) + +target_compile_features(address_sorting PUBLIC cxx_std_14) + +target_include_directories(address_sorting + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(address_sorting + ${_gRPC_ALLTARGETS_LIBRARIES} +) + + +add_library(gpr + ${_gRPC_SOURCE_DIR}/src/core/lib/config/config_vars.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/config/config_vars_non_generated.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/config/load_config.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thread_local.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/alloc.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/android/log.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/atm.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/iphone/cpu.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/linux/cpu.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/linux/log.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/log.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/msys/tmpfile.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/posix/cpu.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/posix/log.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/posix/string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/posix/sync.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/posix/time.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/posix/tmpfile.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/sync.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/sync_abseil.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/time.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/time_precise.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/windows/cpu.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/gpr/windows/log.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/windows/string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/windows/string_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/windows/sync.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/windows/time.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/windows/tmpfile.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gpr/wrap_memcpy.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/crash.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/examine_stack.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/fork.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/host_port.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/linux/env.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/mpscq.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/posix/env.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/posix/stat.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/posix/thd.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/strerror.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/tchar.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/time_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/windows/env.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/windows/stat.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/windows/thd.cc +) + +target_compile_features(gpr PUBLIC cxx_std_14) + +target_include_directories(gpr + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(gpr + ${_gRPC_ALLTARGETS_LIBRARIES} + absl::base + absl::core_headers + absl::flags + absl::flags_marshalling + absl::any_invocable + absl::memory + absl::random_random + absl::status + absl::cord + absl::str_format + absl::strings + absl::synchronization + absl::time + absl::optional + absl::variant +) +if(_gRPC_PLATFORM_ANDROID) + target_link_libraries(gpr + android + log + ) +endif() + + +add_library(grpc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/backend_metrics/backend_metric_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/census/grpc_context.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/channel_idle/channel_idle_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/channel_idle/idle_filter_state.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/backend_metric.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/backup_poller.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/channel_connectivity.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_channelz.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_plugin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_service_config.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/config_selector.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/dynamic_filters.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/global_subchannel_pool.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/http_proxy_mapper.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/address_filtering.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc + 
${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/endpoint_list.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/health_check_client.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/oob_backend_metric.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/outlier_detection/outlier_detection.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/priority/priority.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/ring_hash/ring_hash.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/rls/rls.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/weighted_round_robin/static_stride_scheduler.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/weighted_round_robin/weighted_round_robin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/xds/cds.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_manager.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/xds/xds_override_host.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/xds/xds_wrr_locality.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/local_subchannel_pool.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/binder/binder_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/dns_resolver_plugin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/event_engine/event_engine_client_channel_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/event_engine/service_config_helper.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc + 
${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/google_c2p/google_c2p_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/polling_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_filter_legacy_call_data.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_service_config.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_throttle.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/subchannel.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/subchannel_pool_interface.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/subchannel_stream_client.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/deadline/deadline_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/fault_injection/fault_injection_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/fault_injection/fault_injection_service_config_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/client/http_client_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/client_authority_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/http_filters_plugin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/message_compress/compression_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/server/http_server_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/message_size/message_size_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/rbac/rbac_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/rbac/rbac_service_config_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/server_config_selector/server_config_selector_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/stateful_session/stateful_session_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/stateful_session/stateful_session_service_config_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/gcp/metadata_query.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/alpn/alpn.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/client/chttp2_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/server/chttp2_server.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/bin_decoder.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/bin_encoder.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/chttp2_transport.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/decode_huff.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/flow_control.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_data.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_goaway.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_ping.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_rst_stream.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_settings.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_window_update.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_encoder.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_encoder_table.cc + 
${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_parse_result.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_parser_table.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/http2_settings.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/http_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/huffsyms.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/max_concurrent_streams_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/parsing.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/ping_abuse_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/ping_callbacks.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/ping_rate_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/stream_lists.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/varint.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/write_size_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/writing.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/inproc/inproc_plugin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/inproc/inproc_transport.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/certs.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/clusters.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/config_dump.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/config_dump_shared.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/init_dump.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/listeners.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/memory.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/metrics.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/mutex_stats.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/server_info.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/admin/v3/tap.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/annotations/resource.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/accesslog/v3/accesslog.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/bootstrap/v3/bootstrap.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/cluster/v3/circuit_breaker.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/cluster/v3/cluster.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/cluster/v3/filter.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/cluster/v3/outlier_detection.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/common/matcher/v3/matcher.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/address.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/backoff.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/base.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/config_source.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/event_service_config.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/extension.upb.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/grpc_method_list.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/grpc_service.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/health_check.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/http_uri.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/protocol.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/proxy_protocol.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/resolver.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/socket_option.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/substitution_format_string.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/core/v3/udp_socket_config.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint_components.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/endpoint/v3/load_report.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/listener/v3/api_listener.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/listener/v3/listener.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/listener/v3/listener_components.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/listener/v3/quic_config.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/listener/v3/udp_listener_config.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/metrics/v3/metrics_service.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/metrics/v3/stats.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/overload/v3/overload.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/rbac/v3/rbac.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/route/v3/route.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/route/v3/route_components.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/route/v3/scoped_route.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/tap/v3/common.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/datadog.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/dynamic_ot.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/http_tracer.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/lightstep.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/opencensus.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/opentelemetry.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/service.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/skywalking.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/trace.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/xray.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/config/trace/v3/zipkin.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/data/accesslog/v3/accesslog.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/clusters/aggregate/v3/cluster.upb.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/filters/common/fault/v3/fault.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/filters/http/fault/v3/fault.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/filters/http/rbac/v3/rbac.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/filters/http/router/v3/router.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/filters/http/stateful_session/v3/stateful_session.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/filters/network/http_connection_manager/v3/http_connection_manager.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/http/stateful_session/cookie/v3/cookie.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/client_side_weighted_round_robin/v3/client_side_weighted_round_robin.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/common/v3/common.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/pick_first/v3/pick_first.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/ring_hash/v3/ring_hash.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/load_balancing_policies/wrr_locality/v3/wrr_locality.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/cert.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/common.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/secret.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/tls.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/tls_spiffe_validator_config.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/service/discovery/v3/ads.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/service/discovery/v3/discovery.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/service/load_stats/v3/lrs.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/service/status/v3/csds.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/http/v3/cookie.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/http/v3/path_transformation.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/filter_state.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/http_inputs.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/metadata.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/node.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/number.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/path.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/regex.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/status_code_input.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/string.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/struct.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/matcher/v3/value.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/metadata/v3/metadata.upb.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/tracing/v3/custom_tag.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/hash_policy.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/http.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/http_status.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/percent.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/range.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/ratelimit_strategy.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/ratelimit_unit.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/semantic_version.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/envoy/type/v3/token_bucket.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/api/annotations.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/api/expr/v1alpha1/checked.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/api/expr/v1alpha1/syntax.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/api/http.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/api/httpbody.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/any.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/descriptor.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/duration.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/empty.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/struct.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/timestamp.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/wrappers.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/rpc/status.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/opencensus/proto/trace/v1/trace_config.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/lookup/v1/rls.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/lookup/v1/rls_config.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/udpa/annotations/migrate.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/udpa/annotations/security.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/udpa/annotations/status.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/udpa/annotations/versioning.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/validate/validate.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/annotations/v3/migrate.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/annotations/v3/security.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/annotations/v3/sensitive.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/annotations/v3/status.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/annotations/v3/versioning.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/authority.upb.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/cidr.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/collection_entry.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/context_params.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/extension.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/resource.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/resource_locator.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/core/v3/resource_name.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/data/orca/v3/orca_load_report.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/service/orca/v3/orca.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/cel.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/domain.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/http_inputs.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/ip.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/matcher.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/range.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/regex.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/matcher/v3/string.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/v3/cel.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/v3/range.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/type/v3/typed_struct.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/certs.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/clusters.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/config_dump.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/config_dump_shared.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/init_dump.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/listeners.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/memory.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/metrics.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/mutex_stats.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/server_info.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/admin/v3/tap.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/annotations/deprecation.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/annotations/resource.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/accesslog/v3/accesslog.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/bootstrap/v3/bootstrap.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/cluster/v3/circuit_breaker.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/cluster/v3/cluster.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/cluster/v3/filter.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/cluster/v3/outlier_detection.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/common/matcher/v3/matcher.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/address.upbdefs.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/backoff.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/base.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/config_source.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/event_service_config.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/extension.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/grpc_method_list.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/grpc_service.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/health_check.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/http_uri.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/protocol.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/proxy_protocol.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/resolver.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/socket_option.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/substitution_format_string.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/core/v3/udp_socket_config.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/endpoint.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/endpoint_components.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/load_report.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/listener/v3/api_listener.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/listener/v3/listener.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/listener/v3/listener_components.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/listener/v3/quic_config.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/listener/v3/udp_listener_config.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/metrics/v3/metrics_service.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/metrics/v3/stats.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/overload/v3/overload.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/rbac/v3/rbac.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/route/v3/route.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/route/v3/route_components.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/route/v3/scoped_route.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/tap/v3/common.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/datadog.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/dynamic_ot.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/http_tracer.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/lightstep.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/opencensus.upbdefs.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/opentelemetry.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/service.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/skywalking.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/trace.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/xray.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/config/trace/v3/zipkin.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/data/accesslog/v3/accesslog.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/clusters/aggregate/v3/cluster.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/filters/common/fault/v3/fault.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/filters/http/fault/v3/fault.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/filters/http/rbac/v3/rbac.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/filters/http/router/v3/router.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/filters/http/stateful_session/v3/stateful_session.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/filters/network/http_connection_manager/v3/http_connection_manager.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/http/stateful_session/cookie/v3/cookie.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/cert.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/common.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/secret.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/tls.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/tls_spiffe_validator_config.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/service/discovery/v3/ads.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/service/discovery/v3/discovery.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/service/load_stats/v3/lrs.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/service/status/v3/csds.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/http/v3/cookie.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/http/v3/path_transformation.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/filter_state.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/http_inputs.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/metadata.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/node.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/number.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/path.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/regex.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/status_code_input.upbdefs.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/string.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/struct.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/matcher/v3/value.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/metadata/v3/metadata.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/tracing/v3/custom_tag.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/hash_policy.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/http.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/http_status.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/percent.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/range.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/ratelimit_strategy.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/ratelimit_unit.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/semantic_version.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/envoy/type/v3/token_bucket.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/api/annotations.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/api/expr/v1alpha1/checked.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/api/expr/v1alpha1/syntax.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/api/http.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/api/httpbody.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/protobuf/any.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/protobuf/descriptor.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/protobuf/duration.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/protobuf/empty.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/protobuf/struct.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/protobuf/timestamp.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/protobuf/wrappers.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/google/rpc/status.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/opencensus/proto/trace/v1/trace_config.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/src/proto/grpc/lookup/v1/rls_config.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/udpa/annotations/migrate.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/udpa/annotations/security.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/udpa/annotations/sensitive.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/udpa/annotations/status.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/udpa/annotations/versioning.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/validate/validate.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/annotations/v3/migrate.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/annotations/v3/security.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/annotations/v3/sensitive.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/annotations/v3/status.upbdefs.c + 
${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/annotations/v3/versioning.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/authority.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/cidr.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/collection_entry.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/context_params.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/extension.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/resource.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/resource_locator.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/core/v3/resource_name.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/cel.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/domain.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/http_inputs.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/ip.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/matcher.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/range.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/regex.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/matcher/v3/string.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/v3/cel.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/v3/range.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upbdefs-generated/xds/type/v3/typed_struct.upbdefs.c + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/certificate_provider_store.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/file_watcher_certificate_provider_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_api.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_audit_logger_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_bootstrap.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_bootstrap_grpc.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_certificate_provider.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_channel_stack_modifier.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_client.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_client_grpc.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_client_stats.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_cluster.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_cluster_specifier_plugin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_common_types.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_health_status.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_http_fault_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_http_filters.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_http_rbac_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_http_stateful_session_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_lb_policy_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_listener.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_route_config.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_routing.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_server_config_fetcher.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/xds/xds_transport_grpc.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/address_utils/parse_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/address_utils/sockaddr_utils.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/backoff/backoff.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/backoff/random_early_detection.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/call_tracer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_args.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_args_preconditioning.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_stack.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_stack_builder.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_stack_builder_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channelz.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channelz_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/connected_channel.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/promise_based_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/server_call_tracer_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/status_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/compression/compression.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/compression/compression_internal.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/compression/message_compress.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/config/core_configuration.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/event_log.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/histogram_view.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/stats.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/stats_data.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/ares_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/cf_engine/cf_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/cf_engine/cfstream_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/cf_engine/dns_service_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/channel_args_endpoint_config.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/default_event_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/default_event_engine_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/event_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/forkable.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/memory_allocator.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/ev_epoll1_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/ev_poll_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/event_poller_posix_default.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/internal_errqueue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/lockfree_event.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_engine_listener.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_engine_listener_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/tcp_socket_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/timer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/timer_heap.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/timer_manager.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/traced_buffer_list.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/wakeup_fd_eventfd.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/wakeup_fd_pipe.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/wakeup_fd_posix_default.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/resolved_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/shim.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/slice.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/slice_buffer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/tcp_socket_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thread_pool/thread_count.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thread_pool/thread_pool_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thread_pool/work_stealing_thread_pool.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thready_event_engine/thready_event_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/time_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/iocp.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/win_socket.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/windows_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/windows_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/windows_listener.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/work_queue/basic_work_queue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/experiments/config.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/experiments/experiments.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/load_file.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/per_cpu.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/ref_counted_string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/status_helper.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/time.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/time_averaged_stats.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/validation_errors.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/work_serializer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/handshaker/proxy_mapper_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/http/format_request.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/http/httpcli.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/http/httpcli_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/http/parser.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/buffer_list.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/call_combiner.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/cfstream_handle.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/closure.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/combiner.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/dualstack_socket_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint_pair_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint_pair_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/error.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/error_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_apple.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_epoll1_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_poll_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/event_engine_shims/closure.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/event_engine_shims/endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/event_engine_shims/tcp_client.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/exec_ctx.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/executor.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/fork_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/fork_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/gethostname_fallback.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/gethostname_host_name_max.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/gethostname_sysconf.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/grpc_if_nametoindex_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/internal_errqueue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iocp_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_internal.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_posix_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/load_file.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/lockfree_event.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/polling_entity.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset_set.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset_set_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/resolve_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/resolve_address_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/resolve_address_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/sockaddr_utils_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_factory_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_mutator.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_common_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/systemd_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_utils_posix_common.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer_generic.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer_heap.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer_manager.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/unix_sockets_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/unix_sockets_posix_noop.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/vsock.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_eventfd.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_nospecial.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_pipe.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/json/json_object_loader.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/json/json_reader.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/json/json_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/json/json_writer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/load_balancing/lb_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/load_balancing/lb_policy_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/matchers/matchers.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/activity.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/party.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/sleep.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resolver/resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resolver/resolver_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resolver/server_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/api.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/arena.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/memory_quota.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/periodic_update.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/resource_quota.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/thread_quota.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/audit_logging.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/authorization_policy_provider_vtable.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/evaluate_args.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/grpc_authorization_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/grpc_server_authz_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/matchers.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/rbac_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/stdout_logger.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/certificate_provider/certificate_provider_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/context/security_context.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/alts_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/call_creds_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/channel_creds_registry_init.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/composite/composite_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/external/aws_external_account_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/external/aws_request_signer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/external/external_account_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/external/file_external_account_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/external/url_external_account_credentials.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/fake/fake_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/google_default/credentials_generic.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/google_default/google_default_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/iam/iam_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/insecure/insecure_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/jwt/json_token.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/jwt/jwt_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/jwt/jwt_verifier.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/local/local_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/oauth2/oauth2_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/plugin/plugin_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/ssl/ssl_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/grpc_tls_certificate_distributor.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/grpc_tls_certificate_match.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/grpc_tls_certificate_provider.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/grpc_tls_certificate_verifier.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/tls_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/tls_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/xds/xds_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/alts/alts_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/fake/fake_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/insecure/insecure_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/load_system_roots_fallback.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/load_system_roots_supported.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/local/local_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/ssl/ssl_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/ssl_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/tls/tls_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/client_auth_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/secure_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/security_handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/server_auth_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/tsi_error.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/util/json_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/service_config/service_config_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/service_config/service_config_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/b64.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/percent_encoding.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice_buffer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice_refcount.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice_string_helpers.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/surface/api_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/builtins.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/byte_buffer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/byte_buffer_reader.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call_details.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call_log_batch.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel_init.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel_ping.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel_stack_type.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/completion_queue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/completion_queue_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/event_string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/init.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/init_internally.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/lame_client.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/metadata_array.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/server.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/validate_metadata.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/version.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/batch_builder.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/bdp_estimator.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/connectivity_state.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/error_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/handshaker_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/http_connect_handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/metadata_batch.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/parsed_metadata.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/pid_controller.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/status_conversion.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/tcp_connect_handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/timeout_encoding.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/transport.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/transport_op_string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/uri/uri_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/plugin_registry/grpc_plugin_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/plugin_registry/grpc_plugin_registry_extra.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/crypt/aes_gcm.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/crypt/gsec.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/frame_protector/alts_counter.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/frame_protector/alts_crypter.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/frame_protector/alts_frame_protector.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/frame_protector/frame_handler.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/handshaker/alts_handshaker_client.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/handshaker/alts_shared_resource.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/handshaker/alts_tsi_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/handshaker/transport_security_common_api.cc + 
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/fake_transport_security.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/local_transport_security.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/ssl/key_logging/ssl_key_logging.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/ssl/session_cache/ssl_session_cache.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/ssl/session_cache/ssl_session_openssl.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/ssl_transport_security.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/ssl_transport_security_utils.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/transport_security.cc
+  ${_gRPC_SOURCE_DIR}/src/core/tsi/transport_security_grpc.cc
+)
+
+target_compile_features(grpc PUBLIC cxx_std_14)
+
+target_include_directories(grpc
+  PUBLIC ${_gRPC_SOURCE_DIR}/include
+  PRIVATE
+    ${_gRPC_SOURCE_DIR}
+    ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR}
+    ${_gRPC_RE2_INCLUDE_DIR}
+    ${_gRPC_SSL_INCLUDE_DIR}
+    ${_gRPC_UPB_GENERATED_DIR}
+    ${_gRPC_UPB_GRPC_GENERATED_DIR}
+    ${_gRPC_UPB_INCLUDE_DIR}
+    ${_gRPC_XXHASH_INCLUDE_DIR}
+    ${_gRPC_ZLIB_INCLUDE_DIR}
+)
+target_link_libraries(grpc
+  ${_gRPC_ALLTARGETS_LIBRARIES}
+  ${_gRPC_RE2_LIBRARIES}
+  upb_json_lib
+  upb_textformat_lib
+  ${_gRPC_ZLIB_LIBRARIES}
+  absl::algorithm_container
+  absl::cleanup
+  absl::flat_hash_map
+  absl::flat_hash_set
+  absl::inlined_vector
+  absl::bind_front
+  absl::function_ref
+  absl::hash
+  absl::type_traits
+  absl::random_bit_gen_ref
+  absl::random_distributions
+  absl::statusor
+  absl::span
+  absl::utility
+  ${_gRPC_CARES_LIBRARIES}
+  gpr
+  ${_gRPC_SSL_LIBRARIES}
+  ${_gRPC_ADDRESS_SORTING_LIBRARIES}
+)
+if(_gRPC_PLATFORM_IOS OR _gRPC_PLATFORM_MAC)
+  target_link_libraries(grpc "-framework CoreFoundation")
+endif()
+
+add_library(grpc_unsecure
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/backend_metrics/backend_metric_filter.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/census/grpc_context.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/channel_idle/channel_idle_filter.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/channel_idle/idle_filter_state.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/backend_metric.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/backup_poller.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/channel_connectivity.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_channelz.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_factory.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_plugin.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/client_channel_service_config.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/config_selector.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/dynamic_filters.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/global_subchannel_pool.cc
+  ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/http_proxy_mapper.cc
${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/address_filtering.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/endpoint_list.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/health_check_client.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/oob_backend_metric.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/outlier_detection/outlier_detection.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/priority/priority.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/rls/rls.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/weighted_round_robin/static_stride_scheduler.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/weighted_round_robin/weighted_round_robin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/local_subchannel_pool.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/binder/binder_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/dns_resolver_plugin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/event_engine/event_engine_client_channel_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/event_engine/service_config_helper.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/polling_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_filter_legacy_call_data.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_service_config.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/retry_throttle.cc + 
${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/subchannel.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/subchannel_pool_interface.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/client_channel/subchannel_stream_client.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/deadline/deadline_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/fault_injection/fault_injection_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/fault_injection/fault_injection_service_config_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/client/http_client_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/client_authority_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/http_filters_plugin.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/message_compress/compression_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/http/server/http_server_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/filters/message_size/message_size_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/client/chttp2_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/server/chttp2_server.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/bin_decoder.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/bin_encoder.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/chttp2_transport.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/decode_huff.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/flow_control.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_data.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_goaway.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_ping.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_rst_stream.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_settings.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/frame_window_update.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_encoder.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_encoder_table.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_parse_result.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/hpack_parser_table.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/http2_settings.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/http_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/huffsyms.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/max_concurrent_streams_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/parsing.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/ping_abuse_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/ping_callbacks.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/ping_rate_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/stream_lists.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/varint.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/write_size_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/chttp2/transport/writing.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/inproc/inproc_plugin.cc + 
${_gRPC_SOURCE_DIR}/src/core/ext/transport/inproc/inproc_transport.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/api/annotations.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/api/http.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/any.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/descriptor.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/duration.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/empty.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/struct.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/timestamp.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/wrappers.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/rpc/status.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/src/proto/grpc/lookup/v1/rls.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/validate/validate.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/data/orca/v3/orca_load_report.upb.c + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/xds/service/orca/v3/orca.upb.c + ${_gRPC_SOURCE_DIR}/src/core/lib/address_utils/parse_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/address_utils/sockaddr_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/backoff/backoff.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/backoff/random_early_detection.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/call_tracer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_args.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_args_preconditioning.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_stack.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_stack_builder.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_stack_builder_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channel_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channelz.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/channelz_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/connected_channel.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/promise_based_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/server_call_tracer_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/channel/status_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/compression/compression.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/compression/compression_internal.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/compression/message_compress.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/config/core_configuration.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/event_log.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/histogram_view.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/stats.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/stats_data.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/debug/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/ares_resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/cf_engine/cf_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/cf_engine/cfstream_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/cf_engine/dns_service_resolver.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/channel_args_endpoint_config.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/default_event_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/default_event_engine_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/event_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/forkable.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/memory_allocator.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/ev_epoll1_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/ev_poll_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/event_poller_posix_default.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/internal_errqueue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/lockfree_event.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_engine_listener.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/posix_engine_listener_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/tcp_socket_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/timer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/timer_heap.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/timer_manager.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/traced_buffer_list.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/wakeup_fd_eventfd.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/wakeup_fd_pipe.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/posix_engine/wakeup_fd_posix_default.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/resolved_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/shim.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/slice.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/slice_buffer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/tcp_socket_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thread_pool/thread_count.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thread_pool/thread_pool_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thread_pool/work_stealing_thread_pool.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/thready_event_engine/thready_event_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/time_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/iocp.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/win_socket.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/windows_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/windows_engine.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/windows/windows_listener.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/event_engine/work_queue/basic_work_queue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/experiments/config.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/experiments/experiments.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/load_file.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/per_cpu.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/ref_counted_string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/status_helper.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/time.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/time_averaged_stats.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/validation_errors.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/gprpp/work_serializer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/handshaker/proxy_mapper_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/http/format_request.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/http/httpcli.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/http/parser.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/buffer_list.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/call_combiner.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/cfstream_handle.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/closure.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/combiner.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/dualstack_socket_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint_pair_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/endpoint_pair_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/error.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/error_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_apple.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_epoll1_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_poll_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/ev_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/event_engine_shims/closure.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/event_engine_shims/endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/event_engine_shims/tcp_client.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/exec_ctx.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/executor.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/fork_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/fork_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/gethostname_fallback.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/gethostname_host_name_max.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/gethostname_sysconf.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/grpc_if_nametoindex_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/internal_errqueue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iocp_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_internal.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_posix_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/iomgr_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/load_file.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/lockfree_event.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/polling_entity.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset_set.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset_set_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/pollset_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/resolve_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/resolve_address_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/resolve_address_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/sockaddr_utils_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_factory_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_mutator.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_common_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_posix.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_utils_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/socket_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/systemd_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client_cfstream.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_client_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_utils_posix_common.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_server_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/tcp_windows.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer_generic.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer_heap.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/timer_manager.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/unix_sockets_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/unix_sockets_posix_noop.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/vsock.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_eventfd.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_nospecial.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_pipe.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/iomgr/wakeup_fd_posix.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/json/json_object_loader.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/json/json_reader.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/json/json_writer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/load_balancing/lb_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/load_balancing/lb_policy_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/activity.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/party.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/sleep.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/promise/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resolver/resolver.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resolver/resolver_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resolver/server_address.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/api.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/arena.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/memory_quota.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/periodic_update.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/resource_quota.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/thread_quota.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/resource_quota/trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/authorization_policy_provider_vtable.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/evaluate_args.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/authorization/grpc_server_authz_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/certificate_provider/certificate_provider_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/context/security_context.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/call_creds_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/composite/composite_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/fake/fake_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/insecure/insecure_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/plugin/plugin_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/credentials/tls/tls_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/fake/fake_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/insecure/insecure_security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/load_system_roots_fallback.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/load_system_roots_supported.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/security_connector/security_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/client_auth_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/secure_endpoint.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/security_handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/server_auth_filter.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/transport/tsi_error.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/security/util/json_util.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/service_config/service_config_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/service_config/service_config_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/b64.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/percent_encoding.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice_buffer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice_refcount.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/slice/slice_string_helpers.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/api_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/builtins.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/byte_buffer.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/byte_buffer_reader.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call_details.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call_log_batch.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/call_trace.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel_init.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel_ping.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/channel_stack_type.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/completion_queue.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/completion_queue_factory.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/event_string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/init.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/init_internally.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/lame_client.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/metadata_array.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/server.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/validate_metadata.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/surface/version.cc + 
${_gRPC_SOURCE_DIR}/src/core/lib/transport/batch_builder.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/bdp_estimator.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/connectivity_state.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/error_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/handshaker_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/http_connect_handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/metadata_batch.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/parsed_metadata.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/pid_controller.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/status_conversion.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/tcp_connect_handshaker.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/timeout_encoding.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/transport.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/transport/transport_op_string.cc + ${_gRPC_SOURCE_DIR}/src/core/lib/uri/uri_parser.cc + ${_gRPC_SOURCE_DIR}/src/core/plugin_registry/grpc_plugin_registry.cc + ${_gRPC_SOURCE_DIR}/src/core/plugin_registry/grpc_plugin_registry_noextra.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/alts/handshaker/transport_security_common_api.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/fake_transport_security.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/local_transport_security.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/transport_security.cc + ${_gRPC_SOURCE_DIR}/src/core/tsi/transport_security_grpc.cc + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/message/accessors.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/build_enum.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/decode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/internal/base92.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/link.c + ${gRPC_ADDITIONAL_DLL_SRC} +) + +target_compile_features(grpc_unsecure PUBLIC cxx_std_14) + +target_include_directories(grpc_unsecure + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(grpc_unsecure + ${_gRPC_ALLTARGETS_LIBRARIES} + upb_collections_lib + upb + ${_gRPC_ZLIB_LIBRARIES} + absl::algorithm_container + absl::cleanup + absl::flat_hash_map + absl::flat_hash_set + absl::inlined_vector + absl::bind_front + absl::function_ref + absl::hash + absl::type_traits + absl::random_bit_gen_ref + absl::random_distributions + absl::statusor + absl::span + absl::utility + ${_gRPC_CARES_LIBRARIES} + gpr + ${_gRPC_ADDRESS_SORTING_LIBRARIES} +) +if(_gRPC_PLATFORM_IOS OR _gRPC_PLATFORM_MAC) + target_link_libraries(grpc_unsecure "-framework CoreFoundation") +endif() + +add_library(upb + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/base/status.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/collections/array.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/collections/map.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/collections/map_sorter.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/hash/common.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/lex/atoi.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/lex/round_trip.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/lex/strtod.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/lex/unicode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mem/alloc.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mem/arena.c + 
${_gRPC_SOURCE_DIR}/third_party/upb/upb/message/message.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_table/extension_registry.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_table/internal/message.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_table/message.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/wire/decode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/wire/decode_fast.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/wire/encode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/wire/eps_copy_input_stream.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/wire/reader.c +) + +target_compile_features(upb PUBLIC cxx_std_14) + +target_include_directories(upb + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(upb + ${_gRPC_ALLTARGETS_LIBRARIES} + utf8_range_lib +) + + +add_library(upb_collections_lib + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/base/status.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/collections/array.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/collections/map.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/collections/map_sorter.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/hash/common.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mem/alloc.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mem/arena.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/message/message.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_table/extension_registry.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_table/internal/message.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_table/message.c +) + +target_compile_features(upb_collections_lib PUBLIC cxx_std_14) + +target_include_directories(upb_collections_lib + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(upb_collections_lib + ${_gRPC_ALLTARGETS_LIBRARIES} +) + + + +add_library(upb_json_lib + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/descriptor.upb.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/json/decode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/json/encode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/message/accessors.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/build_enum.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/decode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/internal/base92.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/internal/encode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/link.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/def_builder.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/def_pool.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/def_type.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/desc_state.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/enum_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/enum_reserved_range.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/enum_value_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/extension_range.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/field_def.c + 
${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/file_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/message.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/message_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/message_reserved_range.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/method_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/oneof_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/service_def.c +) + +target_compile_features(upb_json_lib PUBLIC cxx_std_14) + +target_include_directories(upb_json_lib + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(upb_json_lib + ${_gRPC_ALLTARGETS_LIBRARIES} + upb_collections_lib + upb +) + + +add_library(upb_textformat_lib + ${_gRPC_SOURCE_DIR}/src/core/ext/upb-generated/google/protobuf/descriptor.upb.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/message/accessors.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/build_enum.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/decode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/internal/base92.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/internal/encode.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/mini_descriptor/link.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/def_builder.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/def_pool.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/def_type.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/desc_state.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/enum_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/enum_reserved_range.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/enum_value_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/extension_range.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/field_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/file_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/message.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/message_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/message_reserved_range.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/method_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/oneof_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/reflection/service_def.c + ${_gRPC_SOURCE_DIR}/third_party/upb/upb/text/encode.c +) + +target_compile_features(upb_textformat_lib PUBLIC cxx_std_14) + +target_include_directories(upb_textformat_lib + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(upb_textformat_lib + ${_gRPC_ALLTARGETS_LIBRARIES} + upb_collections_lib + upb +) + + +add_library(utf8_range_lib + ${_gRPC_SOURCE_DIR}/third_party/utf8_range/naive.c + ${_gRPC_SOURCE_DIR}/third_party/utf8_range/range2-neon.c + ${_gRPC_SOURCE_DIR}/third_party/utf8_range/range2-sse.c +) + +target_compile_features(utf8_range_lib PUBLIC cxx_std_14) + +target_include_directories(utf8_range_lib + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + 
${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(utf8_range_lib + ${_gRPC_ALLTARGETS_LIBRARIES} +) + + +add_library(grpc++ + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/client/binder_connector.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/client/channel_create.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/client/channel_create_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/client/connection_id_generator.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/client/endpoint_binder_pool.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/client/jni_utils.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/client/security_policy_setting.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/security_policy/binder_security_policy.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/server/binder_server.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/server/binder_server_credentials.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/transport/binder_transport.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/utils/ndk_binder.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/utils/transport_stream_receiver_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/wire_format/binder_android.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/wire_format/binder_constants.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/wire_format/transaction.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/wire_format/wire_reader_impl.cc + ${_gRPC_SOURCE_DIR}/src/core/ext/transport/binder/wire_format/wire_writer.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/channel_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_callback.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_context.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_interceptor.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_stats_interceptor.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/create_channel.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/create_channel_internal.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/create_channel_posix.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/insecure_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/secure_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/xds_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/alarm.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/auth_property_iterator.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/channel_arguments.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/channel_filter.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/completion_queue_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/resource_quota_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/rpc_method.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/secure_auth_context.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/secure_channel_arguments.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/secure_create_auth_context.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/tls_certificate_provider.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/tls_certificate_verifier.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/tls_credentials_options.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/validate_service_config.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/version_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/async_generic_service.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/backend_metric_recorder.cc + 
${_gRPC_SOURCE_DIR}/src/cpp/server/channel_argument_option.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/create_default_thread_pool.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/external_connection_acceptor_impl.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/health/default_health_check_service.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/health/health_check_service.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/health/health_check_service_server_builder_option.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/insecure_server_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/secure_server_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_builder.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_callback.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_context.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_posix.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/xds_server_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/thread_manager/thread_manager.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/byte_buffer_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/status.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/string_ref.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/time_cc.cc + ${gRPC_UPB_GEN_DUPL_SRC} +) + +target_compile_features(grpc++ PUBLIC cxx_std_14) + +target_include_directories(grpc++ + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(grpc++ + ${_gRPC_ALLTARGETS_LIBRARIES} + grpc + ${_gRPC_PROTOBUF_LIBRARIES} +) + +add_library(grpc++_unsecure + ${_gRPC_SOURCE_DIR}/src/cpp/client/channel_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_callback.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_context.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_interceptor.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/client_stats_interceptor.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/create_channel.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/create_channel_internal.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/create_channel_posix.cc + ${_gRPC_SOURCE_DIR}/src/cpp/client/insecure_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/alarm.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/channel_arguments.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/channel_filter.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/completion_queue_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/insecure_create_auth_context.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/resource_quota_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/rpc_method.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/validate_service_config.cc + ${_gRPC_SOURCE_DIR}/src/cpp/common/version_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/async_generic_service.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/backend_metric_recorder.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/channel_argument_option.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/create_default_thread_pool.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/external_connection_acceptor_impl.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/health/default_health_check_service.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/health/health_check_service.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/health/health_check_service_server_builder_option.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/insecure_server_credentials.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_builder.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_callback.cc + 
${_gRPC_SOURCE_DIR}/src/cpp/server/server_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_context.cc + ${_gRPC_SOURCE_DIR}/src/cpp/server/server_posix.cc + ${_gRPC_SOURCE_DIR}/src/cpp/thread_manager/thread_manager.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/byte_buffer_cc.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/status.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/string_ref.cc + ${_gRPC_SOURCE_DIR}/src/cpp/util/time_cc.cc + ${gRPC_UPB_GEN_DUPL_SRC} +) + +target_compile_features(grpc++_unsecure PUBLIC cxx_std_14) + +target_include_directories(grpc++_unsecure + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(grpc++_unsecure + ${_gRPC_ALLTARGETS_LIBRARIES} + grpc_unsecure + ${_gRPC_PROTOBUF_LIBRARIES} +) + +add_library(grpc_plugin_support + ${_gRPC_SOURCE_DIR}/src/compiler/cpp_generator.cc + ${_gRPC_SOURCE_DIR}/src/compiler/csharp_generator.cc + ${_gRPC_SOURCE_DIR}/src/compiler/node_generator.cc + ${_gRPC_SOURCE_DIR}/src/compiler/objective_c_generator.cc + ${_gRPC_SOURCE_DIR}/src/compiler/php_generator.cc + ${_gRPC_SOURCE_DIR}/src/compiler/proto_parser_helper.cc + ${_gRPC_SOURCE_DIR}/src/compiler/python_generator.cc + ${_gRPC_SOURCE_DIR}/src/compiler/ruby_generator.cc +) + +target_compile_features(grpc_plugin_support PUBLIC cxx_std_14) + +target_include_directories(grpc_plugin_support + PUBLIC ${_gRPC_SOURCE_DIR}/include + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) +target_link_libraries(grpc_plugin_support + ${_gRPC_ALLTARGETS_LIBRARIES} + ${_gRPC_PROTOBUF_LIBRARIES} + ${_gRPC_PROTOBUF_PROTOC_LIBRARIES} +) + + +add_executable(grpc_cpp_plugin + ${_gRPC_SOURCE_DIR}/src/compiler/cpp_plugin.cc +) +target_compile_features(grpc_cpp_plugin PUBLIC cxx_std_14) +target_include_directories(grpc_cpp_plugin + PRIVATE + ${_gRPC_SOURCE_DIR} + ${_gRPC_SOURCE_DIR}/include + ${_gRPC_ADDRESS_SORTING_INCLUDE_DIR} + ${_gRPC_RE2_INCLUDE_DIR} + ${_gRPC_SSL_INCLUDE_DIR} + ${_gRPC_UPB_GENERATED_DIR} + ${_gRPC_UPB_GRPC_GENERATED_DIR} + ${_gRPC_UPB_INCLUDE_DIR} + ${_gRPC_XXHASH_INCLUDE_DIR} + ${_gRPC_ZLIB_INCLUDE_DIR} +) + +target_link_libraries(grpc_cpp_plugin + ${_gRPC_ALLTARGETS_LIBRARIES} + grpc_plugin_support +) diff --git a/contrib/jemalloc-cmake/README b/contrib/jemalloc-cmake/README index 8d27e7844c5..91b58448c1f 100644 --- a/contrib/jemalloc-cmake/README +++ b/contrib/jemalloc-cmake/README @@ -3,4 +3,4 @@ It allows to integrate JEMalloc into CMake project. - Remove JEMALLOC_HAVE_ATTR_FORMAT_GNU_PRINTF because it's non standard. 
- Added JEMALLOC_CONFIG_MALLOC_CONF substitution - Add musl support (USE_MUSL) -- Also note, that darwin build requires JEMALLOC_PREFIX, while others don not +- Also note, that darwin build requires JEMALLOC_PREFIX, while others do not diff --git a/contrib/libcxx-cmake/CMakeLists.txt b/contrib/libcxx-cmake/CMakeLists.txt index b7e59e2c9a3..c77d5d8319e 100644 --- a/contrib/libcxx-cmake/CMakeLists.txt +++ b/contrib/libcxx-cmake/CMakeLists.txt @@ -1,5 +1,3 @@ -include(CheckCXXCompilerFlag) - set(LIBCXX_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/llvm-project/libcxx") set(SRCS diff --git a/contrib/libhdfs3 b/contrib/libhdfs3 index 377220ef351..bdcb91354b1 160000 --- a/contrib/libhdfs3 +++ b/contrib/libhdfs3 @@ -1 +1 @@ -Subproject commit 377220ef351ae24994a5fcd2b5fa3930d00c4db0 +Subproject commit bdcb91354b1c05b21e73043a112a6f1e3b013497 diff --git a/contrib/libhdfs3-cmake/CMakeLists.txt b/contrib/libhdfs3-cmake/CMakeLists.txt index a630a8e45c4..4278575fd7f 100644 --- a/contrib/libhdfs3-cmake/CMakeLists.txt +++ b/contrib/libhdfs3-cmake/CMakeLists.txt @@ -1,4 +1,4 @@ -if(NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE AND NOT ARCH_S390X) +if(NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_PPC64LE) option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES}) elseif(ENABLE_HDFS) message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration") diff --git a/contrib/libssh-cmake/CMakeLists.txt b/contrib/libssh-cmake/CMakeLists.txt index 58db81cf352..7a3816d4dce 100644 --- a/contrib/libssh-cmake/CMakeLists.txt +++ b/contrib/libssh-cmake/CMakeLists.txt @@ -1,11 +1,12 @@ +option (ENABLE_SSH "Enable support for SSH keys and protocol" ON) + +if (NOT ENABLE_SSH) + message(STATUS "Not using SSH") + return() +endif() + set(LIB_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libssh") set(LIB_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/libssh") -# Specify search path for CMake modules to be loaded by include() -# and find_package() -list(APPEND CMAKE_MODULE_PATH "${LIB_SOURCE_DIR}/cmake/Modules") - -include(DefineCMakeDefaults) -include(DefineCompilerFlags) project(libssh VERSION 0.9.7 LANGUAGES C) @@ -22,12 +23,6 @@ set(APPLICATION_NAME ${PROJECT_NAME}) set(LIBRARY_VERSION "4.8.7") set(LIBRARY_SOVERSION "4") -# where to look first for cmake modules, before ${CMAKE_ROOT}/Modules/ is checked - -# add definitions - -include(DefinePlatformDefaults) - # Copy library files to a lib sub-directory set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${LIB_BINARY_DIR}/lib") diff --git a/contrib/libssh-cmake/IncludeSources.cmake b/contrib/libssh-cmake/IncludeSources.cmake index d72cf11da1f..30348d5d7dd 100644 --- a/contrib/libssh-cmake/IncludeSources.cmake +++ b/contrib/libssh-cmake/IncludeSources.cmake @@ -1,20 +1,8 @@ -set(LIBSSH_LINK_LIBRARIES - ${LIBSSH_REQUIRED_LIBRARIES} -) - - set(LIBSSH_LINK_LIBRARIES ${LIBSSH_LINK_LIBRARIES} OpenSSL::Crypto ) -if (MINGW AND Threads_FOUND) - set(LIBSSH_LINK_LIBRARIES - ${LIBSSH_LINK_LIBRARIES} - Threads::Threads - ) -endif() - set(libssh_SRCS ${LIB_SOURCE_DIR}/src/agent.c ${LIB_SOURCE_DIR}/src/auth.c @@ -66,30 +54,11 @@ set(libssh_SRCS ${LIB_SOURCE_DIR}/src/pki_ed25519_common.c ) -if (DEFAULT_C_NO_DEPRECATION_FLAGS) - set_source_files_properties(known_hosts.c - PROPERTIES - COMPILE_FLAGS ${DEFAULT_C_NO_DEPRECATION_FLAGS}) -endif() - -if (CMAKE_USE_PTHREADS_INIT) - set(libssh_SRCS - ${libssh_SRCS} - ${LIB_SOURCE_DIR}/src/threads/noop.c - ${LIB_SOURCE_DIR}/src/threads/pthread.c - ) -elseif (CMAKE_USE_WIN32_THREADS_INIT) - set(libssh_SRCS - ${libssh_SRCS} - 
${LIB_SOURCE_DIR}/src/threads/noop.c - ${LIB_SOURCE_DIR}/src/threads/winlocks.c - ) -else() - set(libssh_SRCS - ${libssh_SRCS} - ${LIB_SOURCE_DIR}/src/threads/noop.c - ) -endif() +set(libssh_SRCS + ${libssh_SRCS} + ${LIB_SOURCE_DIR}/src/threads/noop.c + ${LIB_SOURCE_DIR}/src/threads/pthread.c +) # LIBCRYPT specific set(libssh_SRCS @@ -127,14 +96,3 @@ target_compile_options(_ssh PRIVATE ${DEFAULT_C_COMPILE_FLAGS} -D_GNU_SOURCE) - - -set_target_properties(_ssh - PROPERTIES - VERSION - ${LIBRARY_VERSION} - SOVERSION - ${LIBRARY_SOVERSION} - DEFINE_SYMBOL - LIBSSH_EXPORTS -) diff --git a/contrib/libunwind b/contrib/libunwind index 30cc1d3fd36..40d8eadf96b 160000 --- a/contrib/libunwind +++ b/contrib/libunwind @@ -1 +1 @@ -Subproject commit 30cc1d3fd3655a5cfa0ab112fe320fb9fc0a8344 +Subproject commit 40d8eadf96b127d9b22d53ce7a4fc52aaedea965 diff --git a/contrib/libunwind-cmake/CMakeLists.txt b/contrib/libunwind-cmake/CMakeLists.txt index 733f99d07f5..8f3cd8bd07b 100644 --- a/contrib/libunwind-cmake/CMakeLists.txt +++ b/contrib/libunwind-cmake/CMakeLists.txt @@ -1,6 +1,3 @@ -include(CheckCCompilerFlag) -include(CheckCXXCompilerFlag) - set(LIBUNWIND_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libunwind") set(LIBUNWIND_CXX_SOURCES @@ -23,15 +20,7 @@ set(LIBUNWIND_ASM_SOURCES "${LIBUNWIND_SOURCE_DIR}/src/UnwindRegistersRestore.S" "${LIBUNWIND_SOURCE_DIR}/src/UnwindRegistersSave.S") -# CMake doesn't pass the correct architecture for Apple prior to CMake 3.19 [1] -# Workaround these two issues by compiling as C. -# -# [1]: https://gitlab.kitware.com/cmake/cmake/-/issues/20771 -if (APPLE AND CMAKE_VERSION VERSION_LESS 3.19) - set_source_files_properties(${LIBUNWIND_ASM_SOURCES} PROPERTIES LANGUAGE C) -else() - enable_language(ASM) -endif() +enable_language(ASM) set(LIBUNWIND_SOURCES ${LIBUNWIND_CXX_SOURCES} @@ -48,27 +37,11 @@ target_compile_definitions(unwind PRIVATE -D_LIBUNWIND_NO_HEAP=1 -D_DEBUG -D_LIB # and disable sanitizers (otherwise infinite loop may happen) target_compile_options(unwind PRIVATE -O3 -fno-exceptions -funwind-tables -fno-sanitize=all $<$:-nostdinc++ -fno-rtti>) -check_c_compiler_flag(-Wunused-but-set-variable HAVE_WARNING_UNUSED_BUT_SET_VARIABLE) -if (HAVE_WARNING_UNUSED_BUT_SET_VARIABLE) - target_compile_options(unwind PRIVATE -Wno-unused-but-set-variable) -endif () - -check_cxx_compiler_flag(-Wmissing-attributes HAVE_WARNING_MISSING_ATTRIBUTES) -if (HAVE_WARNING_MISSING_ATTRIBUTES) - target_compile_options(unwind PRIVATE -Wno-missing-attributes) -endif () - -check_cxx_compiler_flag(-Wmaybe-uninitialized HAVE_WARNING_MAYBE_UNINITIALIZED) -if (HAVE_WARNING_MAYBE_UNINITIALIZED) - target_compile_options(unwind PRIVATE -Wno-maybe-uninitialized) -endif () +target_compile_options(unwind PRIVATE -Wno-unused-but-set-variable) # The library is using register variables that are bound to specific registers # Example: DwarfInstructions.hpp: register unsigned long long x16 __asm("x16") = cfa; -check_cxx_compiler_flag(-Wregister HAVE_WARNING_REGISTER) -if (HAVE_WARNING_REGISTER) - target_compile_options(unwind PRIVATE "$<$:-Wno-register>") -endif () +target_compile_options(unwind PRIVATE "$<$:-Wno-register>") install( TARGETS unwind diff --git a/contrib/llvm-project-cmake/CMakeLists.txt b/contrib/llvm-project-cmake/CMakeLists.txt index d6133f145bc..406bac73e90 100644 --- a/contrib/llvm-project-cmake/CMakeLists.txt +++ b/contrib/llvm-project-cmake/CMakeLists.txt @@ -61,6 +61,9 @@ set (REQUIRED_LLVM_LIBRARIES LLVMDemangle ) +# Skip useless "install" instructions from CMake: +set 
(LLVM_INSTALL_TOOLCHAIN_ONLY 1 CACHE INTERNAL "") + if (ARCH_AMD64) set (LLVM_TARGETS_TO_BUILD "X86" CACHE INTERNAL "") list(APPEND REQUIRED_LLVM_LIBRARIES LLVMX86Info LLVMX86Desc LLVMX86CodeGen) diff --git a/contrib/orc b/contrib/orc index f31c271110a..e24f2c2a3ca 160000 --- a/contrib/orc +++ b/contrib/orc @@ -1 +1 @@ -Subproject commit f31c271110a2f0dac908a152f11708193ae209ee +Subproject commit e24f2c2a3ca0769c96704ab20ad6f512a83ea2ad diff --git a/contrib/pocketfft b/contrib/pocketfft new file mode 160000 index 00000000000..9efd4da52cf --- /dev/null +++ b/contrib/pocketfft @@ -0,0 +1 @@ +Subproject commit 9efd4da52cf8d28d14531d14e43ad9d913807546 diff --git a/contrib/pocketfft-cmake/CMakeLists.txt b/contrib/pocketfft-cmake/CMakeLists.txt new file mode 100644 index 00000000000..01911ee4496 --- /dev/null +++ b/contrib/pocketfft-cmake/CMakeLists.txt @@ -0,0 +1,10 @@ +option (ENABLE_POCKETFFT "Enable pocketfft" ${ENABLE_LIBRARIES}) + +if (NOT ENABLE_POCKETFFT) + message(STATUS "Not using pocketfft") + return() +endif() + +add_library(_pocketfft INTERFACE) +target_include_directories(_pocketfft INTERFACE ${ClickHouse_SOURCE_DIR}/contrib/pocketfft) +add_library(ch_contrib::pocketfft ALIAS _pocketfft) diff --git a/contrib/qpl-cmake/CMakeLists.txt b/contrib/qpl-cmake/CMakeLists.txt index 4e6c66fe731..7a84048e16b 100644 --- a/contrib/qpl-cmake/CMakeLists.txt +++ b/contrib/qpl-cmake/CMakeLists.txt @@ -16,8 +16,7 @@ function(GetLibraryVersion _content _outputVar) SET(${_outputVar} ${CMAKE_MATCH_1} PARENT_SCOPE) endfunction() -FILE(READ "${QPL_PROJECT_DIR}/CMakeLists.txt" HEADER_CONTENT) -GetLibraryVersion("${HEADER_CONTENT}" QPL_VERSION) +set (QPL_VERSION 1.2.0) message(STATUS "Intel QPL version: ${QPL_VERSION}") @@ -28,16 +27,422 @@ message(STATUS "Intel QPL version: ${QPL_VERSION}") # The qpl submodule comes with its own version of isal. It contains code which does not exist in upstream isal. It would be nice to link # only upstream isal (ch_contrib::isal) but at this point we can't. 
-include("${QPL_PROJECT_DIR}/cmake/CompileOptions.cmake") +# ========================================================================== +# Copyright (C) 2022 Intel Corporation +# +# SPDX-License-Identifier: MIT +# ========================================================================== + +set(QPL_LINUX_TOOLCHAIN_CPP_EMBEDDED_FLAGS "-fno-exceptions;-fno-rtti") + +function(modify_standard_language_flag) + # Declaring function parameters + set(OPTIONS "") + set(ONE_VALUE_ARGS + LANGUAGE_NAME + FLAG_NAME + NEW_FLAG_VALUE) + set(MULTI_VALUE_ARGS "") + + # Parsing function parameters + cmake_parse_arguments(MODIFY + "${OPTIONS}" + "${ONE_VALUE_ARGS}" + "${MULTI_VALUE_ARGS}" + ${ARGN}) + + # Variables + set(FLAG_REGULAR_EXPRESSION "${MODIFY_FLAG_NAME}.*[ ]*") + set(NEW_VALUE "${MODIFY_FLAG_NAME}${MODIFY_NEW_FLAG_VALUE}") + + # Replacing specified flag with new value + string(REGEX REPLACE + ${FLAG_REGULAR_EXPRESSION} ${NEW_VALUE} + NEW_COMPILE_FLAGS + "${CMAKE_${MODIFY_LANGUAGE_NAME}_FLAGS}") + + # Returning the value + set(CMAKE_${MODIFY_LANGUAGE_NAME}_FLAGS ${NEW_COMPILE_FLAGS} PARENT_SCOPE) +endfunction() + +function(get_function_name_with_default_bit_width in_function_name bit_width out_function_name) + + if(in_function_name MATCHES ".*_i") + + string(REPLACE "_i" "" in_function_name ${in_function_name}) + + set(${out_function_name} "${in_function_name}_${bit_width}_i" PARENT_SCOPE) + + else() + + set(${out_function_name} "${in_function_name}_${bit_width}" PARENT_SCOPE) + + endif() + +endfunction() + +macro(get_list_of_supported_optimizations PLATFORMS_LIST) + list(APPEND PLATFORMS_LIST "") + list(APPEND PLATFORMS_LIST "px") + list(APPEND PLATFORMS_LIST "avx512") +endmacro(get_list_of_supported_optimizations) + +function(generate_unpack_kernel_arrays current_directory PLATFORMS_LIST) + list(APPEND UNPACK_POSTFIX_LIST "") + list(APPEND UNPACK_PRLE_POSTFIX_LIST "") + list(APPEND PACK_POSTFIX_LIST "") + list(APPEND PACK_INDEX_POSTFIX_LIST "") + list(APPEND SCAN_POSTFIX_LIST "") + list(APPEND DEFAULT_BIT_WIDTH_FUNCTIONS_LIST "") + list(APPEND DEFAULT_BIT_WIDTH_LIST "") + + #create list of functions that use only 8u 16u 32u postfixes + list(APPEND DEFAULT_BIT_WIDTH_FUNCTIONS_LIST "unpack_prle") + list(APPEND DEFAULT_BIT_WIDTH_FUNCTIONS_LIST "extract") + list(APPEND DEFAULT_BIT_WIDTH_FUNCTIONS_LIST "extract_i") + list(APPEND DEFAULT_BIT_WIDTH_FUNCTIONS_LIST "select") + list(APPEND DEFAULT_BIT_WIDTH_FUNCTIONS_LIST "select_i") + list(APPEND DEFAULT_BIT_WIDTH_FUNCTIONS_LIST "expand") + + #create default bit width list + list(APPEND DEFAULT_BIT_WIDTH_LIST "8u") + list(APPEND DEFAULT_BIT_WIDTH_LIST "16u") + list(APPEND DEFAULT_BIT_WIDTH_LIST "32u") + + #create scan kernel postfixes + list(APPEND SCAN_COMPARATOR_LIST "") + + list(APPEND SCAN_COMPARATOR_LIST "eq") + list(APPEND SCAN_COMPARATOR_LIST "ne") + list(APPEND SCAN_COMPARATOR_LIST "lt") + list(APPEND SCAN_COMPARATOR_LIST "le") + list(APPEND SCAN_COMPARATOR_LIST "gt") + list(APPEND SCAN_COMPARATOR_LIST "ge") + list(APPEND SCAN_COMPARATOR_LIST "range") + list(APPEND SCAN_COMPARATOR_LIST "not_range") + + foreach(SCAN_COMPARATOR IN LISTS SCAN_COMPARATOR_LIST) + list(APPEND SCAN_POSTFIX_LIST "_${SCAN_COMPARATOR}_8u") + list(APPEND SCAN_POSTFIX_LIST "_${SCAN_COMPARATOR}_16u8u") + list(APPEND SCAN_POSTFIX_LIST "_${SCAN_COMPARATOR}_32u8u") + endforeach() + + # create unpack kernel postfixes + foreach(input_width RANGE 1 32 1) + if(input_width LESS 8 OR input_width EQUAL 8) + list(APPEND UNPACK_POSTFIX_LIST "_${input_width}u8u") + + elseif(input_width 
LESS 16 OR input_width EQUAL 16) + list(APPEND UNPACK_POSTFIX_LIST "_${input_width}u16u") + + else() + list(APPEND UNPACK_POSTFIX_LIST "_${input_width}u32u") + endif() + endforeach() + + # create pack kernel postfixes + foreach(output_width RANGE 1 8 1) + list(APPEND PACK_POSTFIX_LIST "_8u${output_width}u") + endforeach() + + foreach(output_width RANGE 9 16 1) + list(APPEND PACK_POSTFIX_LIST "_16u${output_width}u") + endforeach() + + foreach(output_width RANGE 17 32 1) + list(APPEND PACK_POSTFIX_LIST "_32u${output_width}u") + endforeach() + + list(APPEND PACK_POSTFIX_LIST "_8u16u") + list(APPEND PACK_POSTFIX_LIST "_8u32u") + list(APPEND PACK_POSTFIX_LIST "_16u32u") + + # create pack index kernel postfixes + list(APPEND PACK_INDEX_POSTFIX_LIST "_nu") + list(APPEND PACK_INDEX_POSTFIX_LIST "_8u") + list(APPEND PACK_INDEX_POSTFIX_LIST "_8u16u") + list(APPEND PACK_INDEX_POSTFIX_LIST "_8u32u") + + # write to file + file(MAKE_DIRECTORY ${current_directory}/generated) + + foreach(PLATFORM_VALUE IN LISTS PLATFORMS_LIST) + set(directory "${current_directory}/generated") + set(PLATFORM_PREFIX "${PLATFORM_VALUE}_") + + # + # Write unpack table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}unpack.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}unpack.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}unpack.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}unpack.cpp "unpack_table_t ${PLATFORM_PREFIX}unpack_table = {\n") + + #write LE kernels + foreach(UNPACK_POSTFIX IN LISTS UNPACK_POSTFIX_LIST) + file(APPEND ${directory}/${PLATFORM_PREFIX}unpack.cpp "\t${PLATFORM_PREFIX}qplc_unpack${UNPACK_POSTFIX},\n") + endforeach() + + #write BE kernels + + #get last element of the list + set(LAST_ELEMENT "") + list(GET UNPACK_POSTFIX_LIST -1 LAST_ELEMENT) + + foreach(UNPACK_POSTFIX IN LISTS UNPACK_POSTFIX_LIST) + + if(UNPACK_POSTFIX STREQUAL LAST_ELEMENT) + file(APPEND ${directory}/${PLATFORM_PREFIX}unpack.cpp "\t${PLATFORM_PREFIX}qplc_unpack_be${UNPACK_POSTFIX}};\n") + else() + file(APPEND ${directory}/${PLATFORM_PREFIX}unpack.cpp "\t${PLATFORM_PREFIX}qplc_unpack_be${UNPACK_POSTFIX},\n") + endif() + endforeach() + + file(APPEND ${directory}/${PLATFORM_PREFIX}unpack.cpp "}\n") + + # + # Write pack table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}pack.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack.cpp "pack_table_t ${PLATFORM_PREFIX}pack_table = {\n") + + #write LE kernels + foreach(PACK_POSTFIX IN LISTS PACK_POSTFIX_LIST) + file(APPEND ${directory}/${PLATFORM_PREFIX}pack.cpp "\t${PLATFORM_PREFIX}qplc_pack${PACK_POSTFIX},\n") + endforeach() + + #write BE kernels + + #get last element of the list + set(LAST_ELEMENT "") + list(GET PACK_POSTFIX_LIST -1 LAST_ELEMENT) + + foreach(PACK_POSTFIX IN LISTS PACK_POSTFIX_LIST) + + if(PACK_POSTFIX STREQUAL LAST_ELEMENT) + file(APPEND ${directory}/${PLATFORM_PREFIX}pack.cpp "\t${PLATFORM_PREFIX}qplc_pack_be${PACK_POSTFIX}};\n") + else() + file(APPEND ${directory}/${PLATFORM_PREFIX}pack.cpp "\t${PLATFORM_PREFIX}qplc_pack_be${PACK_POSTFIX},\n") + endif() + endforeach() + + file(APPEND ${directory}/${PLATFORM_PREFIX}pack.cpp "}\n") + + # + # Write scan table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}scan.cpp "#include 
\"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}scan.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}scan.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}scan.cpp "scan_table_t ${PLATFORM_PREFIX}scan_table = {\n") + + #get last element of the list + set(LAST_ELEMENT "") + list(GET SCAN_POSTFIX_LIST -1 LAST_ELEMENT) + + foreach(SCAN_POSTFIX IN LISTS SCAN_POSTFIX_LIST) + + if(SCAN_POSTFIX STREQUAL LAST_ELEMENT) + file(APPEND ${directory}/${PLATFORM_PREFIX}scan.cpp "\t${PLATFORM_PREFIX}qplc_scan${SCAN_POSTFIX}};\n") + else() + file(APPEND ${directory}/${PLATFORM_PREFIX}scan.cpp "\t${PLATFORM_PREFIX}qplc_scan${SCAN_POSTFIX},\n") + endif() + endforeach() + + file(APPEND ${directory}/${PLATFORM_PREFIX}scan.cpp "}\n") + + # + # Write scan_i table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}scan_i.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}scan_i.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}scan_i.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}scan_i.cpp "scan_i_table_t ${PLATFORM_PREFIX}scan_i_table = {\n") + + #get last element of the list + set(LAST_ELEMENT "") + list(GET SCAN_POSTFIX_LIST -1 LAST_ELEMENT) + + foreach(SCAN_POSTFIX IN LISTS SCAN_POSTFIX_LIST) + + if(SCAN_POSTFIX STREQUAL LAST_ELEMENT) + file(APPEND ${directory}/${PLATFORM_PREFIX}scan_i.cpp "\t${PLATFORM_PREFIX}qplc_scan${SCAN_POSTFIX}_i};\n") + else() + file(APPEND ${directory}/${PLATFORM_PREFIX}scan_i.cpp "\t${PLATFORM_PREFIX}qplc_scan${SCAN_POSTFIX}_i,\n") + endif() + endforeach() + + file(APPEND ${directory}/${PLATFORM_PREFIX}scan_i.cpp "}\n") + + # + # Write pack_index table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}pack_index.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "pack_index_table_t ${PLATFORM_PREFIX}pack_index_table = {\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_bits_nu,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_index_8u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_index_8u16u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_index_8u32u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_bits_be_nu,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_index_8u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_index_be_8u16u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "\t${PLATFORM_PREFIX}qplc_pack_index_be_8u32u};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}pack_index.cpp "}\n") + + # + # Write default bit width functions + # + foreach(DEAULT_BIT_WIDTH_FUNCTION IN LISTS DEFAULT_BIT_WIDTH_FUNCTIONS_LIST) + file(WRITE ${directory}/${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND 
${directory}/${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}.cpp "${DEAULT_BIT_WIDTH_FUNCTION}_table_t ${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}_table = {\n") + + #get last element of the list + set(LAST_ELEMENT "") + list(GET DEFAULT_BIT_WIDTH_LIST -1 LAST_ELEMENT) + + foreach(BIT_WIDTH IN LISTS DEFAULT_BIT_WIDTH_LIST) + + set(FUNCTION_NAME "") + get_function_name_with_default_bit_width(${DEAULT_BIT_WIDTH_FUNCTION} ${BIT_WIDTH} FUNCTION_NAME) + + if(BIT_WIDTH STREQUAL LAST_ELEMENT) + file(APPEND ${directory}/${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}.cpp "\t${PLATFORM_PREFIX}qplc_${FUNCTION_NAME}};\n") + else() + file(APPEND ${directory}/${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}.cpp "\t${PLATFORM_PREFIX}qplc_${FUNCTION_NAME},\n") + endif() + endforeach() + + file(APPEND ${directory}/${PLATFORM_PREFIX}${DEAULT_BIT_WIDTH_FUNCTION}.cpp "}\n") + endforeach() + + # + # Write aggregates table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}aggregates.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "aggregates_table_t ${PLATFORM_PREFIX}aggregates_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "\t${PLATFORM_PREFIX}qplc_bit_aggregates_8u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "\t${PLATFORM_PREFIX}qplc_aggregates_8u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "\t${PLATFORM_PREFIX}qplc_aggregates_16u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "\t${PLATFORM_PREFIX}qplc_aggregates_32u};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}aggregates.cpp "}\n") + + # + # Write mem_copy functions table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "memory_copy_table_t ${PLATFORM_PREFIX}memory_copy_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "\t${PLATFORM_PREFIX}qplc_copy_8u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "\t${PLATFORM_PREFIX}qplc_copy_16u,\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "\t${PLATFORM_PREFIX}qplc_copy_32u};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}memory_copy.cpp "}\n") + + # + # Write mem_copy functions table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}zero.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}zero.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}zero.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}zero.cpp "zero_table_t ${PLATFORM_PREFIX}zero_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}zero.cpp "\t${PLATFORM_PREFIX}qplc_zero_8u};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}zero.cpp "}\n") + + # + # Write move functions table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}move.cpp "#include \"qplc_api.h\"\n") + file(APPEND 
${directory}/${PLATFORM_PREFIX}move.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}move.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}move.cpp "move_table_t ${PLATFORM_PREFIX}move_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}move.cpp "\t${PLATFORM_PREFIX}qplc_move_8u};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}move.cpp "}\n") + + # + # Write crc64 function table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}crc64.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}crc64.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}crc64.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}crc64.cpp "crc64_table_t ${PLATFORM_PREFIX}crc64_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}crc64.cpp "\t${PLATFORM_PREFIX}qplc_crc64};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}crc64.cpp "}\n") + + # + # Write xor_checksum function table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}xor_checksum.cpp "#include \"qplc_api.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}xor_checksum.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}xor_checksum.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}xor_checksum.cpp "xor_checksum_table_t ${PLATFORM_PREFIX}xor_checksum_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}xor_checksum.cpp "\t${PLATFORM_PREFIX}qplc_xor_checksum_8u};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}xor_checksum.cpp "}\n") + + # + # Write deflate functions table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}deflate.cpp "#include \"deflate_slow_icf.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "#include \"deflate_hash_table.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "#include \"deflate_histogram.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "deflate_table_t ${PLATFORM_PREFIX}deflate_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "\t reinterpret_cast(&${PLATFORM_PREFIX}slow_deflate_icf_body),\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "\t reinterpret_cast(&${PLATFORM_PREFIX}deflate_histogram_reset),\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "\t reinterpret_cast(&${PLATFORM_PREFIX}deflate_hash_table_reset)};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate.cpp "}\n") + + # + # Write deflate fix functions table + # + file(WRITE ${directory}/${PLATFORM_PREFIX}deflate_fix.cpp "#include \"deflate_slow.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate_fix.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate_fix.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate_fix.cpp "deflate_fix_table_t ${PLATFORM_PREFIX}deflate_fix_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate_fix.cpp "\t reinterpret_cast(&${PLATFORM_PREFIX}slow_deflate_body)};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}deflate_fix.cpp "}\n") + + # + # Write setup_dictionary functions table + # + file(WRITE 
${directory}/${PLATFORM_PREFIX}setup_dictionary.cpp "#include \"deflate_slow_utils.h\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}setup_dictionary.cpp "#include \"dispatcher/dispatcher.hpp\"\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}setup_dictionary.cpp "namespace qpl::core_sw::dispatcher\n{\n") + file(APPEND ${directory}/${PLATFORM_PREFIX}setup_dictionary.cpp "setup_dictionary_table_t ${PLATFORM_PREFIX}setup_dictionary_table = {\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}setup_dictionary.cpp "\t reinterpret_cast(&${PLATFORM_PREFIX}setup_dictionary)};\n") + + file(APPEND ${directory}/${PLATFORM_PREFIX}setup_dictionary.cpp "}\n") + + endforeach() +endfunction() -# check nasm compiler -include(CheckLanguage) -check_language(ASM_NASM) -if(NOT CMAKE_ASM_NASM_COMPILER) - message(FATAL_ERROR "Please install NASM from 'https://www.nasm.us/' because NASM compiler can not be found!") -endif() -# [SUBDIR]isal enable_language(ASM_NASM) set(ISAL_C_SRC ${QPL_SRC_DIR}/isal/igzip/adler32_base.c @@ -107,11 +512,6 @@ set_target_properties(isal PROPERTIES CXX_STANDARD 11 C_STANDARD 99) -target_compile_options(isal PRIVATE - "$<$:${QPL_LINUX_TOOLCHAIN_REQUIRED_FLAGS}>" - "$<$:>" - "$<$:>") - # AS_FEATURE_LEVEL=10 means "Check SIMD capabilities of the target system at runtime and use up to AVX512 if available". # HAVE_KNOWS_AVX512 means rely on AVX512 being available on the target system. target_compile_options(isal_asm PRIVATE "-I${QPL_SRC_DIR}/isal/include/" @@ -164,15 +564,7 @@ foreach(PLATFORM_ID IN LISTS PLATFORMS_LIST) PUBLIC $ PRIVATE $) - set_target_properties(qplcore_${PLATFORM_ID} PROPERTIES - $<$:C_STANDARD 17>) - - target_compile_options(qplcore_${PLATFORM_ID} - PRIVATE ${QPL_LINUX_TOOLCHAIN_REQUIRED_FLAGS} - PRIVATE "$<$:>" - PRIVATE "$<$:-O3;-D_FORTIFY_SOURCE=2>") - - # Set specific compiler options and/or definitions based on a platform + # Set specific compiler options and/or definitions based on a platform if (${PLATFORM_ID} MATCHES "avx512") target_compile_definitions(qplcore_${PLATFORM_ID} PRIVATE PLATFORM=2) target_compile_options(qplcore_${PLATFORM_ID} PRIVATE -march=skylake-avx512) @@ -221,10 +613,7 @@ set_target_properties(qplcore_sw_dispatcher PROPERTIES CXX_STANDARD 17) target_compile_definitions(qplcore_sw_dispatcher PUBLIC -DQPL_LIB) target_compile_options(qplcore_sw_dispatcher - PRIVATE $<$:${QPL_LINUX_TOOLCHAIN_REQUIRED_FLAGS}; - ${QPL_LINUX_TOOLCHAIN_DYNAMIC_LIBRARY_FLAGS}; - $<$:-O3;-D_FORTIFY_SOURCE=2>> - PRIVATE $<$:${QPL_LINUX_TOOLCHAIN_CPP_EMBEDDED_FLAGS}>) + PRIVATE ${QPL_LINUX_TOOLCHAIN_CPP_EMBEDDED_FLAGS}) # [SUBDIR]core-iaa file(GLOB HW_PATH_SRC ${QPL_SRC_DIR}/core-iaa/sources/aecs/*.c @@ -249,14 +638,6 @@ target_include_directories(core_iaa PRIVATE $ # own_checkers.h PRIVATE $) -set_target_properties(core_iaa PROPERTIES - $<$:C_STANDARD 17> - CXX_STANDARD 17) - -target_compile_options(core_iaa - PRIVATE $<$:${QPL_LINUX_TOOLCHAIN_REQUIRED_FLAGS}; - $<$:-O3;-D_FORTIFY_SOURCE=2>>) - target_compile_features(core_iaa PRIVATE c_std_11) target_compile_definitions(core_iaa PRIVATE QPL_BADARG_CHECK @@ -286,10 +667,7 @@ set_property(GLOBAL APPEND PROPERTY QPL_LIB_DEPS $) target_compile_options(middle_layer_lib - PRIVATE $<$:${QPL_LINUX_TOOLCHAIN_REQUIRED_FLAGS}; - ${QPL_LINUX_TOOLCHAIN_DYNAMIC_LIBRARY_FLAGS}; - $<$:-O3;-D_FORTIFY_SOURCE=2>> - PRIVATE $<$:${QPL_LINUX_TOOLCHAIN_CPP_EMBEDDED_FLAGS}>) + PRIVATE ${QPL_LINUX_TOOLCHAIN_CPP_EMBEDDED_FLAGS}) target_compile_definitions(middle_layer_lib PUBLIC QPL_VERSION="${QPL_VERSION}" @@ -324,15 +702,8 @@ 
target_include_directories(_qpl PRIVATE $ PRIVATE $) -set_target_properties(_qpl PROPERTIES - $<$:C_STANDARD 17> - CXX_STANDARD 17) - target_compile_options(_qpl - PRIVATE $<$:${QPL_LINUX_TOOLCHAIN_REQUIRED_FLAGS}; - ${QPL_LINUX_TOOLCHAIN_DYNAMIC_LIBRARY_FLAGS}; - $<$:-O3;-D_FORTIFY_SOURCE=2>> - PRIVATE $<$:${QPL_LINUX_TOOLCHAIN_CPP_EMBEDDED_FLAGS}>) + PRIVATE ${QPL_LINUX_TOOLCHAIN_CPP_EMBEDDED_FLAGS}) target_compile_definitions(_qpl PRIVATE -DQPL_LIB diff --git a/contrib/qpl-cmake/benchmark_sample/client_scripts/allin1_ssb.sh b/contrib/qpl-cmake/benchmark_sample/client_scripts/allin1_ssb.sh deleted file mode 100644 index 31017b565b6..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/client_scripts/allin1_ssb.sh +++ /dev/null @@ -1,530 +0,0 @@ -#!/bin/bash -ckhost="localhost" -ckport=("9000" "9001" "9002" "9003") -WORKING_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/.." -OUTPUT_DIR="${WORKING_DIR}/output" -LOG_DIR="${OUTPUT_DIR}/log" -RAWDATA_DIR="${WORKING_DIR}/rawdata_dir" -database_dir="${WORKING_DIR}/database_dir" -CLIENT_SCRIPTS_DIR="${WORKING_DIR}/client_scripts" -LOG_PACK_FILE="$(date +%Y-%m-%d-%H-%M-%S)" -QUERY_FILE="queries_ssb.sql" -SERVER_BIND_CMD[0]="numactl -m 0 -N 0" -SERVER_BIND_CMD[1]="numactl -m 0 -N 0" -SERVER_BIND_CMD[2]="numactl -m 1 -N 1" -SERVER_BIND_CMD[3]="numactl -m 1 -N 1" -CLIENT_BIND_CMD="" -SSB_GEN_FACTOR=20 -TABLE_NAME="lineorder_flat" -TALBE_ROWS="119994608" -CODEC_CONFIG="lz4 deflate zstd" - -# define instance number -inst_num=$1 -if [ ! -n "$1" ]; then - echo "Please clarify instance number from 1,2,3 or 4" - exit 1 -else - echo "Benchmarking with instance number:$1" -fi - -if [ ! -d "$OUTPUT_DIR" ]; then -mkdir $OUTPUT_DIR -fi -if [ ! -d "$LOG_DIR" ]; then -mkdir $LOG_DIR -fi -if [ ! -d "$RAWDATA_DIR" ]; then -mkdir $RAWDATA_DIR -fi - -# define different directories -dir_server=("" "_s2" "_s3" "_s4") -ckreadSql=" - CREATE TABLE customer - ( - C_CUSTKEY UInt32, - C_NAME String, - C_ADDRESS String, - C_CITY LowCardinality(String), - C_NATION LowCardinality(String), - C_REGION LowCardinality(String), - C_PHONE String, - C_MKTSEGMENT LowCardinality(String) - ) - ENGINE = MergeTree ORDER BY (C_CUSTKEY); - - CREATE TABLE lineorder - ( - LO_ORDERKEY UInt32, - LO_LINENUMBER UInt8, - LO_CUSTKEY UInt32, - LO_PARTKEY UInt32, - LO_SUPPKEY UInt32, - LO_ORDERDATE Date, - LO_ORDERPRIORITY LowCardinality(String), - LO_SHIPPRIORITY UInt8, - LO_QUANTITY UInt8, - LO_EXTENDEDPRICE UInt32, - LO_ORDTOTALPRICE UInt32, - LO_DISCOUNT UInt8, - LO_REVENUE UInt32, - LO_SUPPLYCOST UInt32, - LO_TAX UInt8, - LO_COMMITDATE Date, - LO_SHIPMODE LowCardinality(String) - ) - ENGINE = MergeTree PARTITION BY toYear(LO_ORDERDATE) ORDER BY (LO_ORDERDATE, LO_ORDERKEY); - - CREATE TABLE part - ( - P_PARTKEY UInt32, - P_NAME String, - P_MFGR LowCardinality(String), - P_CATEGORY LowCardinality(String), - P_BRAND LowCardinality(String), - P_COLOR LowCardinality(String), - P_TYPE LowCardinality(String), - P_SIZE UInt8, - P_CONTAINER LowCardinality(String) - ) - ENGINE = MergeTree ORDER BY P_PARTKEY; - - CREATE TABLE supplier - ( - S_SUPPKEY UInt32, - S_NAME String, - S_ADDRESS String, - S_CITY LowCardinality(String), - S_NATION LowCardinality(String), - S_REGION LowCardinality(String), - S_PHONE String - ) - ENGINE = MergeTree ORDER BY S_SUPPKEY; -" -supplier_table=" - CREATE TABLE supplier - ( - S_SUPPKEY UInt32, - S_NAME String, - S_ADDRESS String, - S_CITY LowCardinality(String), - S_NATION LowCardinality(String), - S_REGION LowCardinality(String), - S_PHONE String - ) - ENGINE = MergeTree 
ORDER BY S_SUPPKEY; -" -part_table=" - CREATE TABLE part - ( - P_PARTKEY UInt32, - P_NAME String, - P_MFGR LowCardinality(String), - P_CATEGORY LowCardinality(String), - P_BRAND LowCardinality(String), - P_COLOR LowCardinality(String), - P_TYPE LowCardinality(String), - P_SIZE UInt8, - P_CONTAINER LowCardinality(String) - ) - ENGINE = MergeTree ORDER BY P_PARTKEY; -" -lineorder_table=" - CREATE TABLE lineorder - ( - LO_ORDERKEY UInt32, - LO_LINENUMBER UInt8, - LO_CUSTKEY UInt32, - LO_PARTKEY UInt32, - LO_SUPPKEY UInt32, - LO_ORDERDATE Date, - LO_ORDERPRIORITY LowCardinality(String), - LO_SHIPPRIORITY UInt8, - LO_QUANTITY UInt8, - LO_EXTENDEDPRICE UInt32, - LO_ORDTOTALPRICE UInt32, - LO_DISCOUNT UInt8, - LO_REVENUE UInt32, - LO_SUPPLYCOST UInt32, - LO_TAX UInt8, - LO_COMMITDATE Date, - LO_SHIPMODE LowCardinality(String) - ) - ENGINE = MergeTree PARTITION BY toYear(LO_ORDERDATE) ORDER BY (LO_ORDERDATE, LO_ORDERKEY); -" -customer_table=" - CREATE TABLE customer - ( - C_CUSTKEY UInt32, - C_NAME String, - C_ADDRESS String, - C_CITY LowCardinality(String), - C_NATION LowCardinality(String), - C_REGION LowCardinality(String), - C_PHONE String, - C_MKTSEGMENT LowCardinality(String) - ) - ENGINE = MergeTree ORDER BY (C_CUSTKEY); -" - -lineorder_flat_table=" - SET max_memory_usage = 20000000000; - CREATE TABLE lineorder_flat - ENGINE = MergeTree - PARTITION BY toYear(LO_ORDERDATE) - ORDER BY (LO_ORDERDATE, LO_ORDERKEY) AS - SELECT - l.LO_ORDERKEY AS LO_ORDERKEY, - l.LO_LINENUMBER AS LO_LINENUMBER, - l.LO_CUSTKEY AS LO_CUSTKEY, - l.LO_PARTKEY AS LO_PARTKEY, - l.LO_SUPPKEY AS LO_SUPPKEY, - l.LO_ORDERDATE AS LO_ORDERDATE, - l.LO_ORDERPRIORITY AS LO_ORDERPRIORITY, - l.LO_SHIPPRIORITY AS LO_SHIPPRIORITY, - l.LO_QUANTITY AS LO_QUANTITY, - l.LO_EXTENDEDPRICE AS LO_EXTENDEDPRICE, - l.LO_ORDTOTALPRICE AS LO_ORDTOTALPRICE, - l.LO_DISCOUNT AS LO_DISCOUNT, - l.LO_REVENUE AS LO_REVENUE, - l.LO_SUPPLYCOST AS LO_SUPPLYCOST, - l.LO_TAX AS LO_TAX, - l.LO_COMMITDATE AS LO_COMMITDATE, - l.LO_SHIPMODE AS LO_SHIPMODE, - c.C_NAME AS C_NAME, - c.C_ADDRESS AS C_ADDRESS, - c.C_CITY AS C_CITY, - c.C_NATION AS C_NATION, - c.C_REGION AS C_REGION, - c.C_PHONE AS C_PHONE, - c.C_MKTSEGMENT AS C_MKTSEGMENT, - s.S_NAME AS S_NAME, - s.S_ADDRESS AS S_ADDRESS, - s.S_CITY AS S_CITY, - s.S_NATION AS S_NATION, - s.S_REGION AS S_REGION, - s.S_PHONE AS S_PHONE, - p.P_NAME AS P_NAME, - p.P_MFGR AS P_MFGR, - p.P_CATEGORY AS P_CATEGORY, - p.P_BRAND AS P_BRAND, - p.P_COLOR AS P_COLOR, - p.P_TYPE AS P_TYPE, - p.P_SIZE AS P_SIZE, - p.P_CONTAINER AS P_CONTAINER - FROM lineorder AS l - INNER JOIN customer AS c ON c.C_CUSTKEY = l.LO_CUSTKEY - INNER JOIN supplier AS s ON s.S_SUPPKEY = l.LO_SUPPKEY - INNER JOIN part AS p ON p.P_PARTKEY = l.LO_PARTKEY; - show settings ilike 'max_memory_usage'; -" - -function insert_data(){ - echo "insert_data:$1" - create_table_prefix="clickhouse client --host ${ckhost} --port $2 --multiquery -q" - insert_data_prefix="clickhouse client --query " - case $1 in - all) - clickhouse client --host ${ckhost} --port $2 --multiquery -q"$ckreadSql" && { - ${insert_data_prefix} "INSERT INTO customer FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/customer.tbl --port=$2 - ${insert_data_prefix} "INSERT INTO part FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/part.tbl --port=$2 - ${insert_data_prefix} "INSERT INTO supplier FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/supplier.tbl --port=$2 - ${insert_data_prefix} "INSERT INTO lineorder FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/lineorder.tbl --port=$2 - } - ${create_table_prefix}"${lineorder_flat_table}" - 
;; - customer) - echo ${create_table_prefix}\"${customer_table}\" - ${create_table_prefix}"${customer_table}" && { - echo "${insert_data_prefix} \"INSERT INTO $1 FORMAT CSV\" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2" - ${insert_data_prefix} "INSERT INTO $1 FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2 - } - ;; - part) - echo ${create_table_prefix}\"${part_table}\" - ${create_table_prefix}"${part_table}" && { - echo "${insert_data_prefix} \"INSERT INTO $1 FORMAT CSV\" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2" - ${insert_data_prefix} "INSERT INTO $1 FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2 - } - ;; - supplier) - echo ${create_table_prefix}"${supplier_table}" - ${create_table_prefix}"${supplier_table}" && { - echo "${insert_data_prefix} \"INSERT INTO $1 FORMAT CSV\" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2" - ${insert_data_prefix} "INSERT INTO $1 FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2 - } - ;; - lineorder) - echo ${create_table_prefix}"${lineorder_table}" - ${create_table_prefix}"${lineorder_table}" && { - echo "${insert_data_prefix} \"INSERT INTO $1 FORMAT CSV\" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2" - ${insert_data_prefix} "INSERT INTO $1 FORMAT CSV" < ${RAWDATA_DIR}/ssb-dbgen/$1.tbl --port=$2 - } - ;; - lineorder_flat) - echo ${create_table_prefix}"${lineorder_flat_table}" - ${create_table_prefix}"${lineorder_flat_table}" - return 0 - ;; - *) - exit 0 - ;; - - esac -} - -function check_sql(){ - select_sql="select * from "$1" limit 1" - clickhouse client --host ${ckhost} --port $2 --multiquery -q"${select_sql}" -} - -function check_table(){ - checknum=0 - source_tables="customer part supplier lineorder lineorder_flat" - test_tables=${1:-${source_tables}} - echo "Checking table data required in server..." - for i in $(seq 0 $[inst_num-1]) - do - for j in `echo ${test_tables}` - do - check_sql $j ${ckport[i]} &> /dev/null || { - let checknum+=1 && insert_data "$j" ${ckport[i]} - } - done - done - - for i in $(seq 0 $[inst_num-1]) - do - echo "clickhouse client --host ${ckhost} --port ${ckport[i]} -m -q\"select count() from ${TABLE_NAME};\"" - var=$(clickhouse client --host ${ckhost} --port ${ckport[i]} -m -q"select count() from ${TABLE_NAME};") - if [ $var -eq $TALBE_ROWS ];then - echo "Instance_${i} Table data integrity check OK -> Rows:$var" - else - echo "Instance_${i} Table data integrity check Failed -> Rows:$var" - exit 1 - fi - done - if [ $checknum -gt 0 ];then - echo "Need sleep 10s after first table data insertion...$checknum" - sleep 10 - fi -} - -function check_instance(){ -instance_alive=0 -for i in {1..10} -do - sleep 1 - netstat -nltp | grep ${1} > /dev/null - if [ $? -ne 1 ];then - instance_alive=1 - break - fi - -done - -if [ $instance_alive -eq 0 ];then - echo "check_instance -> clickhouse server instance faild to launch due to 10s timeout!" - exit 1 -else - echo "check_instance -> clickhouse server instance launch successfully!" 
-fi -} - -function start_clickhouse_for_insertion(){ - echo "start_clickhouse_for_insertion" - for i in $(seq 0 $[inst_num-1]) - do - echo "cd ${database_dir}/$1${dir_server[i]}" - echo "${SERVER_BIND_CMD[i]} clickhouse server -C config_${1}${dir_server[i]}.xml >&${LOG_DIR}/${1}_${i}_server_log& > /dev/null" - - cd ${database_dir}/$1${dir_server[i]} - ${SERVER_BIND_CMD[i]} clickhouse server -C config_${1}${dir_server[i]}.xml >&${LOG_DIR}/${1}_${i}_server_log& > /dev/null - check_instance ${ckport[i]} - done -} - -function start_clickhouse_for_stressing(){ - echo "start_clickhouse_for_stressing" - for i in $(seq 0 $[inst_num-1]) - do - echo "cd ${database_dir}/$1${dir_server[i]}" - echo "${SERVER_BIND_CMD[i]} clickhouse server -C config_${1}${dir_server[i]}.xml >&/dev/null&" - - cd ${database_dir}/$1${dir_server[i]} - ${SERVER_BIND_CMD[i]} clickhouse server -C config_${1}${dir_server[i]}.xml >&/dev/null& - check_instance ${ckport[i]} - done -} -yum -y install git make gcc sudo net-tools &> /dev/null -pip3 install clickhouse_driver numpy &> /dev/null -test -d ${RAWDATA_DIR}/ssb-dbgen || git clone https://github.com/vadimtk/ssb-dbgen.git ${RAWDATA_DIR}/ssb-dbgen && cd ${RAWDATA_DIR}/ssb-dbgen - -if [ ! -f ${RAWDATA_DIR}/ssb-dbgen/dbgen ];then - make && { - test -f ${RAWDATA_DIR}/ssb-dbgen/customer.tbl || echo y |./dbgen -s ${SSB_GEN_FACTOR} -T c - test -f ${RAWDATA_DIR}/ssb-dbgen/part.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T p - test -f ${RAWDATA_DIR}/ssb-dbgen/supplier.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T s - test -f ${RAWDATA_DIR}/ssb-dbgen/date.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T d - test -f ${RAWDATA_DIR}/ssb-dbgen/lineorder.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T l - } -else - test -f ${RAWDATA_DIR}/ssb-dbgen/customer.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T c - test -f ${RAWDATA_DIR}/ssb-dbgen/part.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T p - test -f ${RAWDATA_DIR}/ssb-dbgen/supplier.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T s - test -f ${RAWDATA_DIR}/ssb-dbgen/date.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T d - test -f ${RAWDATA_DIR}/ssb-dbgen/lineorder.tbl || echo y | ./dbgen -s ${SSB_GEN_FACTOR} -T l - -fi - -filenum=`find ${RAWDATA_DIR}/ssb-dbgen/ -name "*.tbl" | wc -l` - -if [ $filenum -ne 5 ];then - echo "generate ssb data file *.tbl faild" - exit 1 -fi - -function kill_instance(){ -instance_alive=1 -for i in {1..2} -do - pkill clickhouse && sleep 5 - instance_alive=0 - for i in $(seq 0 $[inst_num-1]) - do - netstat -nltp | grep ${ckport[i]} > /dev/null - if [ $? -ne 1 ];then - instance_alive=1 - break; - fi - done - if [ $instance_alive -eq 0 ];then - break; - fi -done -if [ $instance_alive -eq 0 ];then - echo "kill_instance OK!" -else - echo "kill_instance Failed -> clickhouse server instance still alive due to 10s timeout" - exit 1 -fi -} - -function run_test(){ -is_xml=0 -for i in $(seq 0 $[inst_num-1]) -do - if [ -f ${database_dir}/${1}${dir_server[i]}/config_${1}${dir_server[i]}.xml ]; then - is_xml=$[is_xml+1] - fi -done -if [ $is_xml -eq $inst_num ];then - echo "Benchmark with $inst_num instance" - start_clickhouse_for_insertion ${1} - - for i in $(seq 0 $[inst_num-1]) - do - clickhouse client --host ${ckhost} --port ${ckport[i]} -m -q"show databases;" >/dev/null - done - - if [ $? 
-eq 0 ];then - check_table - fi - kill_instance - - if [ $1 == "deflate" ];then - test -f ${LOG_DIR}/${1}_server_log && deflatemsg=`cat ${LOG_DIR}/${1}_server_log | grep DeflateJobHWPool` - if [ -n "$deflatemsg" ];then - echo ------------------------------------------------------ - echo $deflatemsg - echo ------------------------------------------------------ - fi - fi - echo "Check table data required in server_${1} -> Done! " - - start_clickhouse_for_stressing ${1} - for i in $(seq 0 $[inst_num-1]) - do - clickhouse client --host ${ckhost} --port ${ckport[i]} -m -q"show databases;" >/dev/null - done - if [ $? -eq 0 ];then - test -d ${CLIENT_SCRIPTS_DIR} && cd ${CLIENT_SCRIPTS_DIR} - echo "Client stressing... " - echo "${CLIENT_BIND_CMD} python3 client_stressing_test.py ${QUERY_FILE} $inst_num &> ${LOG_DIR}/${1}.log" - ${CLIENT_BIND_CMD} python3 client_stressing_test.py ${QUERY_FILE} $inst_num &> ${LOG_DIR}/${1}.log - echo "Completed client stressing, checking log... " - finish_log=`grep "Finished" ${LOG_DIR}/${1}.log | wc -l` - if [ $finish_log -eq 1 ] ;then - kill_instance - test -f ${LOG_DIR}/${1}.log && echo "${1}.log ===> ${LOG_DIR}/${1}.log" - else - kill_instance - echo "No find 'Finished' in client log -> Performance test may fail" - exit 1 - - fi - - else - echo "${1} clickhouse server start fail" - exit 1 - fi -else - echo "clickhouse server start fail -> Please check xml files required in ${database_dir} for each instance" - exit 1 - -fi -} -function clear_log(){ - if [ -d "$LOG_DIR" ]; then - cd ${LOG_DIR} && rm -rf * - fi -} - -function gather_log_for_codec(){ - cd ${OUTPUT_DIR} && mkdir -p ${LOG_PACK_FILE}/${1} - cp -rf ${LOG_DIR} ${OUTPUT_DIR}/${LOG_PACK_FILE}/${1} -} - -function pack_log(){ - if [ -e "${OUTPUT_DIR}/run.log" ]; then - cp ${OUTPUT_DIR}/run.log ${OUTPUT_DIR}/${LOG_PACK_FILE}/ - fi - echo "Please check all log information in ${OUTPUT_DIR}/${LOG_PACK_FILE}" -} - -function setup_check(){ - - iax_dev_num=`accel-config list | grep iax | wc -l` - if [ $iax_dev_num -eq 0 ] ;then - iax_dev_num=`accel-config list | grep iax | wc -l` - if [ $iax_dev_num -eq 0 ] ;then - echo "No IAA devices available -> Please check IAA hardware setup manually!" - exit 1 - else - echo "IAA enabled devices number:$iax_dev_num" - fi - else - echo "IAA enabled devices number:$iax_dev_num" - fi - libaccel_version=`accel-config -v` - clickhouser_version=`clickhouse server --version` - kernel_dxd_log=`dmesg | grep dxd` - echo "libaccel_version:$libaccel_version" - echo "clickhouser_version:$clickhouser_version" - echo -e "idxd section in kernel log:\n$kernel_dxd_log" -} - -setup_check -export CLICKHOUSE_WATCHDOG_ENABLE=0 -for i in ${CODEC_CONFIG[@]} -do - clear_log - codec=${i} - echo "run test------------$codec" - run_test $codec - gather_log_for_codec $codec -done - -pack_log -echo "Done." 
\ No newline at end of file diff --git a/contrib/qpl-cmake/benchmark_sample/client_scripts/client_stressing_test.py b/contrib/qpl-cmake/benchmark_sample/client_scripts/client_stressing_test.py deleted file mode 100644 index f12381a198c..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/client_scripts/client_stressing_test.py +++ /dev/null @@ -1,278 +0,0 @@ -from operator import eq -import os -import random -import time -import sys -from clickhouse_driver import Client -import numpy as np -import subprocess -import multiprocessing -from multiprocessing import Manager - -warmup_runs = 10 -calculated_runs = 10 -seconds = 30 -max_instances_number = 8 -retest_number = 3 -retest_tolerance = 10 - - -def checkInt(str): - try: - int(str) - return True - except ValueError: - return False - - -def setup_client(index): - if index < 4: - port_idx = index - else: - port_idx = index + 4 - client = Client( - host="localhost", - database="default", - user="default", - password="", - port="900%d" % port_idx, - ) - union_mode_query = "SET union_default_mode='DISTINCT'" - client.execute(union_mode_query) - return client - - -def warm_client(clientN, clientL, query, loop): - for c_idx in range(clientN): - for _ in range(loop): - clientL[c_idx].execute(query) - - -def read_queries(queries_list): - queries = list() - queries_id = list() - with open(queries_list, "r") as f: - for line in f: - line = line.rstrip() - line = line.split("$") - queries_id.append(line[0]) - queries.append(line[1]) - return queries_id, queries - - -def run_task(client, cname, query, loop, query_latency): - start_time = time.time() - for i in range(loop): - client.execute(query) - query_latency.append(client.last_query.elapsed) - - end_time = time.time() - p95 = np.percentile(query_latency, 95) - print( - "CLIENT: {0} end. 
-> P95: %f, qps: %f".format(cname) - % (p95, loop / (end_time - start_time)) - ) - - -def run_multi_clients(clientN, clientList, query, loop): - client_pids = {} - start_time = time.time() - manager = multiprocessing.Manager() - query_latency_list0 = manager.list() - query_latency_list1 = manager.list() - query_latency_list2 = manager.list() - query_latency_list3 = manager.list() - query_latency_list4 = manager.list() - query_latency_list5 = manager.list() - query_latency_list6 = manager.list() - query_latency_list7 = manager.list() - - for c_idx in range(clientN): - client_name = "Role_%d" % c_idx - if c_idx == 0: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list0), - ) - elif c_idx == 1: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list1), - ) - elif c_idx == 2: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list2), - ) - elif c_idx == 3: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list3), - ) - elif c_idx == 4: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list4), - ) - elif c_idx == 5: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list5), - ) - elif c_idx == 6: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list6), - ) - elif c_idx == 7: - client_pids[c_idx] = multiprocessing.Process( - target=run_task, - args=(clientList[c_idx], client_name, query, loop, query_latency_list7), - ) - else: - print("ERROR: CLIENT number dismatch!!") - exit() - print("CLIENT: %s start" % client_name) - client_pids[c_idx].start() - - for c_idx in range(clientN): - client_pids[c_idx].join() - end_time = time.time() - totalT = end_time - start_time - - query_latencyTotal = list() - for item in query_latency_list0: - query_latencyTotal.append(item) - for item in query_latency_list1: - query_latencyTotal.append(item) - for item in query_latency_list2: - query_latencyTotal.append(item) - for item in query_latency_list3: - query_latencyTotal.append(item) - for item in query_latency_list4: - query_latencyTotal.append(item) - for item in query_latency_list5: - query_latencyTotal.append(item) - for item in query_latency_list6: - query_latencyTotal.append(item) - for item in query_latency_list7: - query_latencyTotal.append(item) - - totalP95 = np.percentile(query_latencyTotal, 95) * 1000 - return totalT, totalP95 - - -def run_task_caculated(client, cname, query, loop): - query_latency = list() - start_time = time.time() - for i in range(loop): - client.execute(query) - query_latency.append(client.last_query.elapsed) - end_time = time.time() - p95 = np.percentile(query_latency, 95) - - -def run_multi_clients_caculated(clientN, clientList, query, loop): - client_pids = {} - start_time = time.time() - for c_idx in range(clientN): - client_name = "Role_%d" % c_idx - client_pids[c_idx] = multiprocessing.Process( - target=run_task_caculated, - args=(clientList[c_idx], client_name, query, loop), - ) - client_pids[c_idx].start() - for c_idx in range(clientN): - client_pids[c_idx].join() - end_time = time.time() - 
totalT = end_time - start_time - return totalT - - -if __name__ == "__main__": - client_number = 1 - queries = list() - queries_id = list() - - if len(sys.argv) != 3: - print( - "usage: python3 client_stressing_test.py [queries_file_path] [client_number]" - ) - sys.exit() - else: - queries_list = sys.argv[1] - client_number = int(sys.argv[2]) - print( - "queries_file_path: %s, client_number: %d" % (queries_list, client_number) - ) - if not os.path.isfile(queries_list) or not os.access(queries_list, os.R_OK): - print("please check the right path for queries file") - sys.exit() - if ( - not checkInt(sys.argv[2]) - or int(sys.argv[2]) > max_instances_number - or int(sys.argv[2]) < 1 - ): - print("client_number should be in [1~%d]" % max_instances_number) - sys.exit() - - client_list = {} - queries_id, queries = read_queries(queries_list) - - for c_idx in range(client_number): - client_list[c_idx] = setup_client(c_idx) - # clear cache - os.system("sync; echo 3 > /proc/sys/vm/drop_caches") - - print("###Polit Run Begin") - for i in queries: - warm_client(client_number, client_list, i, 1) - print("###Polit Run End -> Start stressing....") - - query_index = 0 - for q in queries: - print( - "\n###START -> Index: %d, ID: %s, Query: %s" - % (query_index, queries_id[query_index], q) - ) - warm_client(client_number, client_list, q, warmup_runs) - print("###Warm Done!") - for j in range(0, retest_number): - totalT = run_multi_clients_caculated( - client_number, client_list, q, calculated_runs - ) - curr_loop = int(seconds * calculated_runs / totalT) + 1 - print( - "###Calculation Done! -> loopN: %d, expected seconds:%d" - % (curr_loop, seconds) - ) - - print("###Stress Running! -> %d iterations......" % curr_loop) - - totalT, totalP95 = run_multi_clients( - client_number, client_list, q, curr_loop - ) - - if totalT > (seconds - retest_tolerance) and totalT < ( - seconds + retest_tolerance - ): - break - else: - print( - "###totalT:%d is far way from expected seconds:%d. Run again ->j:%d!" - % (totalT, seconds, j) - ) - - print( - "###Completed! 
-> ID: %s, clientN: %d, totalT: %.2f s, latencyAVG: %.2f ms, P95: %.2f ms, QPS_Final: %.2f" - % ( - queries_id[query_index], - client_number, - totalT, - totalT * 1000 / (curr_loop * client_number), - totalP95, - ((curr_loop * client_number) / totalT), - ) - ) - query_index += 1 - print("###Finished!") diff --git a/contrib/qpl-cmake/benchmark_sample/client_scripts/queries_ssb.sql b/contrib/qpl-cmake/benchmark_sample/client_scripts/queries_ssb.sql deleted file mode 100644 index abf2df6503a..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/client_scripts/queries_ssb.sql +++ /dev/null @@ -1,10 +0,0 @@ -Q1.1$SELECT sum(LO_EXTENDEDPRICE * LO_DISCOUNT) AS revenue FROM lineorder_flat WHERE toYear(LO_ORDERDATE) = 1993 AND LO_DISCOUNT BETWEEN 1 AND 3 AND LO_QUANTITY < 25; -Q2.1$SELECT sum(LO_REVENUE),toYear(LO_ORDERDATE) AS year,P_BRAND FROM lineorder_flat WHERE P_CATEGORY = 'MFGR#12' AND S_REGION = 'AMERICA' GROUP BY year,P_BRAND ORDER BY year,P_BRAND; -Q2.2$SELECT sum(LO_REVENUE),toYear(LO_ORDERDATE) AS year,P_BRAND FROM lineorder_flat WHERE P_BRAND >= 'MFGR#2221' AND P_BRAND <= 'MFGR#2228' AND S_REGION = 'ASIA' GROUP BY year,P_BRAND ORDER BY year,P_BRAND; -Q2.3$SELECT sum(LO_REVENUE),toYear(LO_ORDERDATE) AS year,P_BRAND FROM lineorder_flat WHERE P_BRAND = 'MFGR#2239' AND S_REGION = 'EUROPE' GROUP BY year,P_BRAND ORDER BY year,P_BRAND; -Q3.1$SELECT C_NATION,S_NATION,toYear(LO_ORDERDATE) AS year,sum(LO_REVENUE) AS revenue FROM lineorder_flat WHERE C_REGION = 'ASIA' AND S_REGION = 'ASIA' AND year >= 1992 AND year <= 1997 GROUP BY C_NATION,S_NATION,year ORDER BY year ASC,revenue DESC; -Q3.2$SELECT C_CITY,S_CITY,toYear(LO_ORDERDATE) AS year,sum(LO_REVENUE) AS revenue FROM lineorder_flat WHERE C_NATION = 'UNITED STATES' AND S_NATION = 'UNITED STATES' AND year >= 1992 AND year <= 1997 GROUP BY C_CITY,S_CITY,year ORDER BY year ASC,revenue DESC; -Q3.3$SELECT C_CITY,S_CITY,toYear(LO_ORDERDATE) AS year,sum(LO_REVENUE) AS revenue FROM lineorder_flat WHERE (C_CITY = 'UNITED KI1' OR C_CITY = 'UNITED KI5') AND (S_CITY = 'UNITED KI1' OR S_CITY = 'UNITED KI5') AND year >= 1992 AND year <= 1997 GROUP BY C_CITY,S_CITY,year ORDER BY year ASC,revenue DESC; -Q4.1$SELECT toYear(LO_ORDERDATE) AS year,C_NATION,sum(LO_REVENUE - LO_SUPPLYCOST) AS profit FROM lineorder_flat WHERE C_REGION = 'AMERICA' AND S_REGION = 'AMERICA' AND (P_MFGR = 'MFGR#1' OR P_MFGR = 'MFGR#2') GROUP BY year,C_NATION ORDER BY year ASC,C_NATION ASC; -Q4.2$SELECT toYear(LO_ORDERDATE) AS year,S_NATION,P_CATEGORY,sum(LO_REVENUE - LO_SUPPLYCOST) AS profit FROM lineorder_flat WHERE C_REGION = 'AMERICA' AND S_REGION = 'AMERICA' AND (year = 1997 OR year = 1998) AND (P_MFGR = 'MFGR#1' OR P_MFGR = 'MFGR#2') GROUP BY year,S_NATION,P_CATEGORY ORDER BY year ASC,S_NATION ASC,P_CATEGORY ASC; -Q4.3$SELECT toYear(LO_ORDERDATE) AS year,S_CITY,P_BRAND,sum(LO_REVENUE - LO_SUPPLYCOST) AS profit FROM lineorder_flat WHERE S_NATION = 'UNITED STATES' AND (year = 1997 OR year = 1998) AND P_CATEGORY = 'MFGR#14' GROUP BY year,S_CITY,P_BRAND ORDER BY year ASC,S_CITY ASC,P_BRAND ASC; diff --git a/contrib/qpl-cmake/benchmark_sample/client_scripts/run_ssb.sh b/contrib/qpl-cmake/benchmark_sample/client_scripts/run_ssb.sh deleted file mode 100644 index 6067b1058f2..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/client_scripts/run_ssb.sh +++ /dev/null @@ -1,6 +0,0 @@ -WORKING_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/.." -if [ ! 
-d "${WORKING_DIR}/output" ]; then -mkdir ${WORKING_DIR}/output -fi -bash allin1_ssb.sh 2 > ${WORKING_DIR}/output/run.log -echo "Please check log in: ${WORKING_DIR}/output/run.log" \ No newline at end of file diff --git a/contrib/qpl-cmake/benchmark_sample/database_dir/deflate/config_deflate.xml b/contrib/qpl-cmake/benchmark_sample/database_dir/deflate/config_deflate.xml deleted file mode 100644 index ab77a9cdcbe..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/database_dir/deflate/config_deflate.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - trace - true - - - 8123 - 9000 - 9004 - - ./ - - 8589934592 - 5368709120 - true - - - - deflate_qpl - - - - - - - - - ::/0 - - - default - default - 1 - - - - - - - - - - - diff --git a/contrib/qpl-cmake/benchmark_sample/database_dir/deflate_s2/config_deflate_s2.xml b/contrib/qpl-cmake/benchmark_sample/database_dir/deflate_s2/config_deflate_s2.xml deleted file mode 100644 index b71456486f5..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/database_dir/deflate_s2/config_deflate_s2.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - trace - true - - - 8124 - 9001 - 9005 - - ./ - - 8589934592 - 5368709120 - true - - - - deflate_qpl - - - - - - - - - ::/0 - - - default - default - 1 - - - - - - - - - - - diff --git a/contrib/qpl-cmake/benchmark_sample/database_dir/lz4/config_lz4.xml b/contrib/qpl-cmake/benchmark_sample/database_dir/lz4/config_lz4.xml deleted file mode 100644 index f4dc59b60aa..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/database_dir/lz4/config_lz4.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - trace - true - - - 8123 - 9000 - 9004 - - ./ - - 8589934592 - 5368709120 - true - - - - lz4 - - - - - - - - - ::/0 - - - default - default - 1 - - - - - - - - - - - diff --git a/contrib/qpl-cmake/benchmark_sample/database_dir/lz4_s2/config_lz4_s2.xml b/contrib/qpl-cmake/benchmark_sample/database_dir/lz4_s2/config_lz4_s2.xml deleted file mode 100644 index 357db8942d7..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/database_dir/lz4_s2/config_lz4_s2.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - trace - true - - - 8124 - 9001 - 9005 - - ./ - - 8589934592 - 5368709120 - true - - - - lz4 - - - - - - - - - ::/0 - - - default - default - 1 - - - - - - - - - - - diff --git a/contrib/qpl-cmake/benchmark_sample/database_dir/zstd/config_zstd.xml b/contrib/qpl-cmake/benchmark_sample/database_dir/zstd/config_zstd.xml deleted file mode 100644 index 1c4c738edaf..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/database_dir/zstd/config_zstd.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - trace - true - - - 8123 - 9000 - 9004 - - ./ - - 8589934592 - 5368709120 - true - - - - zstd - - - - - - - - - ::/0 - - - default - default - 1 - - - - - - - - - - - diff --git a/contrib/qpl-cmake/benchmark_sample/database_dir/zstd_s2/config_zstd_s2.xml b/contrib/qpl-cmake/benchmark_sample/database_dir/zstd_s2/config_zstd_s2.xml deleted file mode 100644 index f3db01b7739..00000000000 --- a/contrib/qpl-cmake/benchmark_sample/database_dir/zstd_s2/config_zstd_s2.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - trace - true - - - 8124 - 9001 - 9005 - - ./ - - 8589934592 - 5368709120 - true - - - - zstd - - - - - - - - - ::/0 - - - default - default - 1 - - - - - - - - - - - diff --git a/contrib/re2-cmake/CMakeLists.txt b/contrib/re2-cmake/CMakeLists.txt index e72b5e1fca8..f773bc65a69 100644 --- a/contrib/re2-cmake/CMakeLists.txt +++ b/contrib/re2-cmake/CMakeLists.txt @@ -27,6 +27,17 @@ set(RE2_SOURCES add_library(_re2 ${RE2_SOURCES}) target_include_directories(_re2 PUBLIC 
"${SRC_DIR}") -target_link_libraries(_re2 ch_contrib::abseil_str_format) +target_link_libraries(_re2 PRIVATE + absl::base + absl::core_headers + absl::fixed_array + absl::flat_hash_map + absl::flat_hash_set + absl::inlined_vector + absl::strings + absl::str_format + absl::synchronization + absl::optional + absl::span) add_library(ch_contrib::re2 ALIAS _re2) diff --git a/contrib/rocksdb-cmake/CMakeLists.txt b/contrib/rocksdb-cmake/CMakeLists.txt index 466adf6aff0..7d7666dff87 100644 --- a/contrib/rocksdb-cmake/CMakeLists.txt +++ b/contrib/rocksdb-cmake/CMakeLists.txt @@ -76,7 +76,6 @@ else() endif() endif() -include(CheckCCompilerFlag) if(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64") if(POWER9) set(HAS_POWER9 1) @@ -88,26 +87,15 @@ if(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64") endif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64") if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|AARCH64|arm64|ARM64") - CHECK_C_COMPILER_FLAG("-march=armv8-a+crc+crypto" HAS_ARMV8_CRC) - if(HAS_ARMV8_CRC) - message(STATUS " HAS_ARMV8_CRC yes") - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") - endif(HAS_ARMV8_CRC) + set(HAS_ARMV8_CRC 1) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") endif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|AARCH64|arm64|ARM64") -include(CheckCXXSourceCompiles) -if(NOT MSVC) - set(CMAKE_REQUIRED_FLAGS "-msse4.2 -mpclmul") -endif() - -unset(CMAKE_REQUIRED_FLAGS) -if(HAVE_SSE42) +if(ENABLE_AVX2 AND ENABLE_PCLMULQDQ) add_definitions(-DHAVE_SSE42) add_definitions(-DHAVE_PCLMUL) -elseif(FORCE_SSE42) - message(FATAL_ERROR "FORCE_SSE42=ON but unable to compile with SSE4.2 enabled") endif() set (HAVE_THREAD_LOCAL 1) @@ -121,75 +109,18 @@ elseif(CMAKE_SYSTEM_NAME MATCHES "Linux") add_definitions(-DOS_LINUX) elseif(CMAKE_SYSTEM_NAME MATCHES "SunOS") add_definitions(-DOS_SOLARIS) -elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD") - add_definitions(-DOS_GNU_KFREEBSD) elseif(CMAKE_SYSTEM_NAME MATCHES "FreeBSD") add_definitions(-DOS_FREEBSD) -elseif(CMAKE_SYSTEM_NAME MATCHES "NetBSD") - add_definitions(-DOS_NETBSD) -elseif(CMAKE_SYSTEM_NAME MATCHES "OpenBSD") - add_definitions(-DOS_OPENBSD) -elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly") - add_definitions(-DOS_DRAGONFLYBSD) elseif(CMAKE_SYSTEM_NAME MATCHES "Android") add_definitions(-DOS_ANDROID) -elseif(CMAKE_SYSTEM_NAME MATCHES "Windows") - add_definitions(-DWIN32 -DOS_WIN -D_MBCS -DWIN64 -DNOMINMAX) - if(MINGW) - add_definitions(-D_WIN32_WINNT=_WIN32_WINNT_VISTA) - endif() endif() -if(NOT WIN32) - add_definitions(-DROCKSDB_PLATFORM_POSIX -DROCKSDB_LIB_IO_POSIX) -endif() +add_definitions(-DROCKSDB_PLATFORM_POSIX -DROCKSDB_LIB_IO_POSIX) -option(WITH_FALLOCATE "build with fallocate" ON) -if(WITH_FALLOCATE) - CHECK_C_SOURCE_COMPILES(" -#include -#include -int main() { - int fd = open(\"/dev/null\", 0); - fallocate(fd, FALLOC_FL_KEEP_SIZE, 0, 1024); -} -" HAVE_FALLOCATE) - if(HAVE_FALLOCATE) - add_definitions(-DROCKSDB_FALLOCATE_PRESENT) - endif() -endif() - -CHECK_C_SOURCE_COMPILES(" -#include -int main() { - int fd = open(\"/dev/null\", 0); - sync_file_range(fd, 0, 1024, SYNC_FILE_RANGE_WRITE); -} -" HAVE_SYNC_FILE_RANGE_WRITE) -if(HAVE_SYNC_FILE_RANGE_WRITE) - add_definitions(-DROCKSDB_RANGESYNC_PRESENT) -endif() - -CHECK_C_SOURCE_COMPILES(" -#include -int main() { - (void) 
PTHREAD_MUTEX_ADAPTIVE_NP; -} -" HAVE_PTHREAD_MUTEX_ADAPTIVE_NP) -if(HAVE_PTHREAD_MUTEX_ADAPTIVE_NP) +if (OS_LINUX OR OS_FREEBSD) add_definitions(-DROCKSDB_PTHREAD_ADAPTIVE_MUTEX) endif() -include(CheckCXXSymbolExists) -if (OS_FREEBSD) - check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc_np.h" HAVE_MALLOC_USABLE_SIZE) -else() - check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc.h" HAVE_MALLOC_USABLE_SIZE) -endif() -if(HAVE_MALLOC_USABLE_SIZE) - add_definitions(-DROCKSDB_MALLOC_USABLE_SIZE) -endif() - if (OS_LINUX) add_definitions(-DROCKSDB_SCHED_GETCPU_PRESENT) add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT) @@ -204,7 +135,6 @@ include_directories("${ROCKSDB_SOURCE_DIR}/include") if(WITH_FOLLY_DISTRIBUTED_MUTEX) include_directories("${ROCKSDB_SOURCE_DIR}/third-party/folly") endif() -find_package(Threads REQUIRED) # Main library source code @@ -497,7 +427,7 @@ set(SOURCES ${ROCKSDB_SOURCE_DIR}/utilities/transactions/lock/range/range_tree/lib/util/memarena.cc rocksdb_build_version.cc) -if(HAVE_SSE42 AND NOT MSVC) +if(ENABLE_SSE42 AND ENABLE_PCLMULQDQ) set_source_files_properties( "${ROCKSDB_SOURCE_DIR}/util/crc32c.cc" PROPERTIES COMPILE_FLAGS "-msse4.2 -mpclmul") diff --git a/contrib/sparse-checkout/update-grpc.sh b/contrib/sparse-checkout/update-grpc.sh index 38934fdbc1b..21628ce8dd1 100755 --- a/contrib/sparse-checkout/update-grpc.sh +++ b/contrib/sparse-checkout/update-grpc.sh @@ -6,12 +6,13 @@ FILES_TO_CHECKOUT=$(git rev-parse --git-dir)/info/sparse-checkout echo '/*' > $FILES_TO_CHECKOUT echo '!/test/*' >> $FILES_TO_CHECKOUT echo '/test/build/*' >> $FILES_TO_CHECKOUT +echo '/test/core/tsi/alts/fake_handshaker/*' >> $FILES_TO_CHECKOUT +echo '/test/core/event_engine/fuzzing_event_engine/*' >> $FILES_TO_CHECKOUT echo '!/tools/*' >> $FILES_TO_CHECKOUT echo '/tools/codegen/*' >> $FILES_TO_CHECKOUT echo '!/examples/*' >> $FILES_TO_CHECKOUT echo '!/doc/*' >> $FILES_TO_CHECKOUT -# FIXME why do we need csharp? 
-#echo '!/src/csharp/*' >> $FILES_TO_CHECKOUT +echo '!/src/csharp/*' >> $FILES_TO_CHECKOUT echo '!/src/python/*' >> $FILES_TO_CHECKOUT echo '!/src/objective-c/*' >> $FILES_TO_CHECKOUT echo '!/src/php/*' >> $FILES_TO_CHECKOUT diff --git a/contrib/thrift-cmake/CMakeLists.txt b/contrib/thrift-cmake/CMakeLists.txt index d6aa6b9e5f2..89a444cfb83 100644 --- a/contrib/thrift-cmake/CMakeLists.txt +++ b/contrib/thrift-cmake/CMakeLists.txt @@ -47,8 +47,6 @@ set(thriftcpp_threads_SOURCES "${LIBRARY_DIR}/src/thrift/concurrency/Mutex.cpp" ) -include("${ClickHouse_SOURCE_DIR}/contrib/thrift/build/cmake/ConfigureChecks.cmake") # makes config.h - set (HAVE_ARPA_INET_H 1) set (HAVE_FCNTL_H 1) set (HAVE_GETOPT_H 1) @@ -81,10 +79,6 @@ if (OS_LINUX AND NOT USE_MUSL) set (STRERROR_R_CHAR_P 1) endif () -#set(PACKAGE ${PACKAGE_NAME}) -#set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}") -#set(VERSION ${thrift_VERSION}) - # generate a config.h file configure_file("${CMAKE_CURRENT_SOURCE_DIR}/build/cmake/config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/thrift/config.h") diff --git a/contrib/update-submodules.sh b/contrib/update-submodules.sh index c94681e6240..b12f3f924dc 100755 --- a/contrib/update-submodules.sh +++ b/contrib/update-submodules.sh @@ -1,11 +1,24 @@ #!/bin/sh - set -e -WORKDIR=$(dirname "$0") -WORKDIR=$(readlink -f "${WORKDIR}") +SCRIPT_PATH=$(realpath "$0") +SCRIPT_DIR=$(dirname "${SCRIPT_PATH}") +GIT_DIR=$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel) +cd $GIT_DIR -"$WORKDIR/sparse-checkout/setup-sparse-checkout.sh" +contrib/sparse-checkout/setup-sparse-checkout.sh git submodule init git submodule sync -git submodule update --depth=1 +# NOTE: do not use --remote for `git submodule update`[1] command, since the submodule references to the specific commit SHA1 in the subproject. +# It may cause unexpected behavior. Instead you need to commit a new SHA1 for a submodule. +# +# [1] - https://git-scm.com/book/en/v2/Git-Tools-Submodules +git config --file .gitmodules --get-regexp '.*path' | sed 's/[^ ]* //' | xargs -I _ --max-procs 64 git submodule update --depth=1 --single-branch _ + +# We don't want to depend on any third-party CMake files. +# To check it, find and delete them. 
+grep -o -P '"contrib/[^"]+"' .gitmodules | + grep -v -P 'contrib/(llvm-project|google-protobuf|grpc|abseil-cpp|corrosion)' | + xargs -I@ find @ \ + -'(' -name 'CMakeLists.txt' -or -name '*.cmake' -')' -and -not -name '*.h.cmake' \ + -delete diff --git a/contrib/xz-cmake/CMakeLists.txt b/contrib/xz-cmake/CMakeLists.txt index c3a8203c83e..c73433d9863 100644 --- a/contrib/xz-cmake/CMakeLists.txt +++ b/contrib/xz-cmake/CMakeLists.txt @@ -98,8 +98,6 @@ if (ARCH_S390X) add_compile_definitions(WORDS_BIGENDIAN) endif () -find_package(Threads REQUIRED) - add_library(_liblzma ${SRC_DIR}/src/common/mythread.h diff --git a/docker/docs/builder/Dockerfile b/docker/docs/builder/Dockerfile index 3ca2bdafcb3..b7b706a8a5c 100644 --- a/docker/docs/builder/Dockerfile +++ b/docker/docs/builder/Dockerfile @@ -4,8 +4,8 @@ FROM node:16-alpine RUN apk add --no-cache git openssh bash -# At this point we want to really update /opt/clickhouse-docs -# despite the cached images +# At this point we want to really update /opt/clickhouse-docs directory +# So we reset the cache ARG CACHE_INVALIDATOR=0 RUN git clone https://github.com/ClickHouse/clickhouse-docs.git \ diff --git a/docker/keeper/Dockerfile b/docker/keeper/Dockerfile index 2a332771fff..b174dfde675 100644 --- a/docker/keeper/Dockerfile +++ b/docker/keeper/Dockerfile @@ -34,7 +34,7 @@ RUN arch=${TARGETARCH:-amd64} \ # lts / testing / prestable / etc ARG REPO_CHANNEL="stable" ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}" -ARG VERSION="23.9.2.56" +ARG VERSION="23.10.5.20" ARG PACKAGES="clickhouse-keeper" # user/group precreated explicitly with fixed uid/gid on purpose. diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index fb033e28959..20fb97c80bb 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -6,29 +6,27 @@ FROM clickhouse/test-util:latest AS cctools ENV CC=clang-${LLVM_VERSION} ENV CXX=clang++-${LLVM_VERSION} # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -# DO NOT PUT ANYTHING BEFORE THREE NEXT `RUN` DIRECTIVES +# DO NOT PUT ANYTHING BEFORE THE NEXT TWO `RUN` DIRECTIVES # THE MOST HEAVY OPERATION MUST BE THE FIRST IN THE CACHE # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! # libtapi is required to support .tbh format from recent MacOS SDKs -RUN git clone --depth 1 https://github.com/tpoechtrager/apple-libtapi.git \ +RUN git clone https://github.com/tpoechtrager/apple-libtapi.git \ && cd apple-libtapi \ + && git checkout 15dfc2a8c9a2a89d06ff227560a69f5265b692f9 \ && INSTALLPREFIX=/cctools ./build.sh \ && ./install.sh \ && cd .. \ && rm -rf apple-libtapi # Build and install tools for cross-linking to Darwin (x86-64) -RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ +# Build and install tools for cross-linking to Darwin (aarch64) +RUN git clone https://github.com/tpoechtrager/cctools-port.git \ && cd cctools-port/cctools \ + && git checkout 2a3e1c2a6ff54a30f898b70cfb9ba1692a55fad7 \ && ./configure --prefix=/cctools --with-libtapi=/cctools \ --target=x86_64-apple-darwin \ && make install -j$(nproc) \ - && cd ../.. 
\ - && rm -rf cctools-port - -# Build and install tools for cross-linking to Darwin (aarch64) -RUN git clone --depth 1 https://github.com/tpoechtrager/cctools-port.git \ - && cd cctools-port/cctools \ + && make clean \ && ./configure --prefix=/cctools --with-libtapi=/cctools \ --target=aarch64-apple-darwin \ && make install -j$(nproc) \ @@ -62,19 +60,12 @@ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ rustup target add aarch64-unknown-linux-musl && \ rustup target add riscv64gc-unknown-linux-gnu -# NOTE: Seems like gcc-11 is too new for ubuntu20 repository # A cross-linker for RISC-V 64 (we need it, because LLVM's LLD does not work): RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \ && apt-get update \ && apt-get install --yes \ binutils-riscv64-linux-gnu \ build-essential \ - g++-11 \ - gcc-11 \ - gcc-aarch64-linux-gnu \ - libc6 \ - libc6-dev \ - libc6-dev-arm64-cross \ python3-boto3 \ yasm \ zstd \ diff --git a/docker/packager/binary/build.sh b/docker/packager/binary/build.sh index cc2613cbaf5..fd9bfcaabb2 100755 --- a/docker/packager/binary/build.sh +++ b/docker/packager/binary/build.sh @@ -22,6 +22,7 @@ if [ "$EXTRACT_TOOLCHAIN_DARWIN" = "1" ]; then fi fi + # Uncomment to debug ccache. Don't put ccache log in /output right away, or it # will be confusingly packed into the "performance" package. # export CCACHE_LOGFILE=/build/ccache.log @@ -32,6 +33,7 @@ mkdir -p /build/build_docker cd /build/build_docker rm -f CMakeCache.txt + if [ -n "$MAKE_DEB" ]; then rm -rf /build/packages/root # NOTE: this is for backward compatibility with previous releases, @@ -126,6 +128,7 @@ fi mv ./programs/clickhouse* /output || mv ./programs/*_fuzzer /output [ -x ./programs/self-extracting/clickhouse ] && mv ./programs/self-extracting/clickhouse /output +[ -x ./programs/self-extracting/clickhouse-stripped ] && mv ./programs/self-extracting/clickhouse-stripped /output mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds mv ./programs/*.dict ./programs/*.options ./programs/*_seed_corpus.zip /output ||: # libFuzzer oss-fuzz compatible infrastructure @@ -176,11 +179,12 @@ then tar c -C /build/ --exclude='.git/modules/**' .git | tar x -C "$PERF_OUTPUT"/ch # Create branch pr and origin/master to have them for the following performance comparison git -C "$PERF_OUTPUT"/ch branch pr - git -C "$PERF_OUTPUT"/ch fetch --no-tags --depth 50 origin master:origin/master + git -C "$PERF_OUTPUT"/ch fetch --no-tags --no-recurse-submodules --depth 50 origin master:origin/master # Clean remote, to not have it stale git -C "$PERF_OUTPUT"/ch remote | xargs -n1 git -C "$PERF_OUTPUT"/ch remote remove # And clean all tags - git -C "$PERF_OUTPUT"/ch tag | xargs git -C "$PERF_OUTPUT"/ch tag -d + echo "Deleting $(git -C "$PERF_OUTPUT"/ch tag | wc -l) tags" + git -C "$PERF_OUTPUT"/ch tag | xargs git -C "$PERF_OUTPUT"/ch tag -d >/dev/null git -C "$PERF_OUTPUT"/ch reset --soft pr git -C "$PERF_OUTPUT"/ch log -5 ( diff --git a/docker/packager/packager b/docker/packager/packager index e63a4912e7c..b5bcbada1da 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -236,16 +236,14 @@ def parse_env_variables( cc = compiler result.append("DEB_ARCH=amd64") - cxx = cc.replace("gcc", "g++").replace("clang", "clang++") + cxx = cc.replace("clang", "clang++") if package_type == "deb": - # NOTE: This are the env for packages/build script + # NOTE: This is the env for packages/build script result.append("MAKE_DEB=true") cmake_flags.append("-DENABLE_TESTS=0") 
cmake_flags.append("-DENABLE_UTILS=0") - cmake_flags.append("-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=ON") cmake_flags.append("-DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=ON") - cmake_flags.append("-DCMAKE_AUTOGEN_VERBOSE=ON") cmake_flags.append("-DCMAKE_INSTALL_PREFIX=/usr") cmake_flags.append("-DCMAKE_INSTALL_SYSCONFDIR=/etc") cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var") @@ -265,12 +263,7 @@ def parse_env_variables( elif package_type == "fuzzers": cmake_flags.append("-DENABLE_FUZZING=1") cmake_flags.append("-DENABLE_PROTOBUF=1") - cmake_flags.append("-DUSE_INTERNAL_PROTOBUF_LIBRARY=1") cmake_flags.append("-DWITH_COVERAGE=1") - cmake_flags.append("-DCMAKE_AUTOGEN_VERBOSE=ON") - # cmake_flags.append("-DCMAKE_INSTALL_PREFIX=/usr") - # cmake_flags.append("-DCMAKE_INSTALL_SYSCONFDIR=/etc") - # cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var") # Reduce linking and building time by avoid *install/all dependencies cmake_flags.append("-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON") diff --git a/docker/server/.dockerignore b/docker/server/.dockerignore deleted file mode 100644 index d360712c18f..00000000000 --- a/docker/server/.dockerignore +++ /dev/null @@ -1,8 +0,0 @@ -# post / preinstall scripts (not needed, we do it in Dockerfile) -alpine-root/install/* - -# docs (looks useless) -alpine-root/usr/share/doc/* - -# packages, etc. (used by alpine-build.sh) -tgz-packages/* diff --git a/docker/server/Dockerfile.alpine b/docker/server/Dockerfile.alpine index 7f81d10fc2e..d4498abda6a 100644 --- a/docker/server/Dockerfile.alpine +++ b/docker/server/Dockerfile.alpine @@ -32,7 +32,7 @@ RUN arch=${TARGETARCH:-amd64} \ # lts / testing / prestable / etc ARG REPO_CHANNEL="stable" ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}" -ARG VERSION="23.9.2.56" +ARG VERSION="23.10.5.20" ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static" # user/group precreated explicitly with fixed uid/gid on purpose. diff --git a/docker/server/Dockerfile.ubuntu b/docker/server/Dockerfile.ubuntu index 80a5c869daa..08e95cd535b 100644 --- a/docker/server/Dockerfile.ubuntu +++ b/docker/server/Dockerfile.ubuntu @@ -30,7 +30,7 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list ARG REPO_CHANNEL="stable" ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main" -ARG VERSION="23.9.2.56" +ARG VERSION="23.10.5.20" ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static" # set non-empty deb_location_url url to create a docker image diff --git a/docker/server/README.md b/docker/server/README.md index 6200acbd30c..d6cf2dfdf09 100644 --- a/docker/server/README.md +++ b/docker/server/README.md @@ -19,7 +19,7 @@ For more information and documentation see https://clickhouse.com/. ### Compatibility - The amd64 image requires support for [SSE3 instructions](https://en.wikipedia.org/wiki/SSE3). Virtually all x86 CPUs after 2005 support SSE3. -- The arm64 image requires support for the [ARMv8.2-A architecture](https://en.wikipedia.org/wiki/AArch64#ARMv8.2-A). Most ARM CPUs after 2017 support ARMv8.2-A. A notable exception is Raspberry Pi 4 from 2019 whose CPU only supports ARMv8.0-A. +- The arm64 image requires support for the [ARMv8.2-A architecture](https://en.wikipedia.org/wiki/AArch64#ARMv8.2-A) and additionally the Load-Acquire RCpc register. The register is optional in version ARMv8.2-A and mandatory in [ARMv8.3-A](https://en.wikipedia.org/wiki/AArch64#ARMv8.3-A). 
Supported in Graviton >=2, Azure and GCP instances. Examples for unsupported devices are Raspberry Pi 4 (ARMv8.0-A) and Jetson AGX Xavier/Orin (ARMv8.2-A). ## How to use this image diff --git a/docker/test/base/setup_export_logs.sh b/docker/test/base/setup_export_logs.sh index 0ff79e24bf8..6e3721956c0 100755 --- a/docker/test/base/setup_export_logs.sh +++ b/docker/test/base/setup_export_logs.sh @@ -15,10 +15,15 @@ CLICKHOUSE_CI_LOGS_USER=${CLICKHOUSE_CI_LOGS_USER:-ci} # Pre-configured destination cluster, where to export the data CLICKHOUSE_CI_LOGS_CLUSTER=${CLICKHOUSE_CI_LOGS_CLUSTER:-system_logs_export} -EXTRA_COLUMNS=${EXTRA_COLUMNS:-"pull_request_number UInt32, commit_sha String, check_start_time DateTime('UTC'), check_name String, instance_type String, instance_id String, "} -EXTRA_COLUMNS_EXPRESSION=${EXTRA_COLUMNS_EXPRESSION:-"CAST(0 AS UInt32) AS pull_request_number, '' AS commit_sha, now() AS check_start_time, '' AS check_name, '' AS instance_type, '' AS instance_id"} +EXTRA_COLUMNS=${EXTRA_COLUMNS:-"pull_request_number UInt32, commit_sha String, check_start_time DateTime('UTC'), check_name LowCardinality(String), instance_type LowCardinality(String), instance_id String, INDEX ix_pr (pull_request_number) TYPE set(100), INDEX ix_commit (commit_sha) TYPE set(100), INDEX ix_check_time (check_start_time) TYPE minmax, "} +EXTRA_COLUMNS_EXPRESSION=${EXTRA_COLUMNS_EXPRESSION:-"CAST(0 AS UInt32) AS pull_request_number, '' AS commit_sha, now() AS check_start_time, toLowCardinality('') AS check_name, toLowCardinality('') AS instance_type, '' AS instance_id"} EXTRA_ORDER_BY_COLUMNS=${EXTRA_ORDER_BY_COLUMNS:-"check_name, "} +# trace_log needs more columns for symbolization +EXTRA_COLUMNS_TRACE_LOG="${EXTRA_COLUMNS} symbols Array(LowCardinality(String)), lines Array(LowCardinality(String)), " +EXTRA_COLUMNS_EXPRESSION_TRACE_LOG="${EXTRA_COLUMNS_EXPRESSION}, arrayMap(x -> toLowCardinality(demangle(addressToSymbol(x))), trace) AS symbols, arrayMap(x -> toLowCardinality(addressToLine(x)), trace) AS lines" + + function __set_connection_args { # It's impossible to use generous $CONNECTION_ARGS string, it's unsafe from word splitting perspective. @@ -121,13 +126,32 @@ function setup_logs_replication # It's doesn't make sense to try creating tables if SYNC fails echo "SYSTEM SYNC DATABASE REPLICA default" | clickhouse-client "${CONNECTION_ARGS[@]}" || return 0 + debug_or_sanitizer_build=$(clickhouse-client -q "WITH ((SELECT value FROM system.build_options WHERE name='BUILD_TYPE') AS build, (SELECT value FROM system.build_options WHERE name='CXX_FLAGS') as flags) SELECT build='Debug' OR flags LIKE '%fsanitize%'") + echo "Build is debug or sanitizer: $debug_or_sanitizer_build" + # For each system log table: echo 'Create %_log tables' clickhouse-client --query "SHOW TABLES FROM system LIKE '%\\_log'" | while read -r table do + if [[ "$table" = "trace_log" ]] + then + EXTRA_COLUMNS_FOR_TABLE="${EXTRA_COLUMNS_TRACE_LOG}" + # Do not try to resolve stack traces in case of debug/sanitizers + # build, since it is too slow (flushing of trace_log can take ~1min + # with such MV attached) + if [[ "$debug_or_sanitizer_build" = 1 ]]; then + EXTRA_COLUMNS_EXPRESSION_FOR_TABLE="${EXTRA_COLUMNS_EXPRESSION}" + else + EXTRA_COLUMNS_EXPRESSION_FOR_TABLE="${EXTRA_COLUMNS_EXPRESSION_TRACE_LOG}" + fi + else + EXTRA_COLUMNS_FOR_TABLE="${EXTRA_COLUMNS}" + EXTRA_COLUMNS_EXPRESSION_FOR_TABLE="${EXTRA_COLUMNS_EXPRESSION}" + fi + # Calculate hash of its structure. 
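# For illustration: with the symbols/lines columns materialized at export time,
# the destination cluster can aggregate stack traces without access to the
# original binaries. A sketch of such a query (the hash suffix of the
# destination table name is a placeholder):
#   clickhouse-client --query "
#       SELECT arrayStringConcat(symbols, '\n') AS stack, count()
#       FROM trace_log_<hash>
#       GROUP BY stack
#       ORDER BY count() DESC
#       LIMIT 10"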
Note: 4 is the version of extra columns - increment it if extra columns are changed: hash=$(clickhouse-client --query " - SELECT sipHash64(4, groupArray((name, type))) + SELECT sipHash64(9, groupArray((name, type))) FROM (SELECT name, type FROM system.columns WHERE database = 'system' AND table = '$table' ORDER BY position) @@ -135,7 +159,7 @@ function setup_logs_replication # Create the destination table with adapted name and structure: statement=$(clickhouse-client --format TSVRaw --query "SHOW CREATE TABLE system.${table}" | sed -r -e ' - s/^\($/('"$EXTRA_COLUMNS"'/; + s/^\($/('"$EXTRA_COLUMNS_FOR_TABLE"'/; s/ORDER BY \(/ORDER BY ('"$EXTRA_ORDER_BY_COLUMNS"'/; s/^CREATE TABLE system\.\w+_log$/CREATE TABLE IF NOT EXISTS '"$table"'_'"$hash"'/; /^TTL /d @@ -155,7 +179,7 @@ function setup_logs_replication ENGINE = Distributed(${CLICKHOUSE_CI_LOGS_CLUSTER}, default, ${table}_${hash}) SETTINGS flush_on_detach=0 EMPTY AS - SELECT ${EXTRA_COLUMNS_EXPRESSION}, * + SELECT ${EXTRA_COLUMNS_EXPRESSION_FOR_TABLE}, * FROM system.${table} " || continue @@ -163,8 +187,18 @@ function setup_logs_replication clickhouse-client --query " CREATE MATERIALIZED VIEW system.${table}_watcher TO system.${table}_sender AS - SELECT ${EXTRA_COLUMNS_EXPRESSION}, * + SELECT ${EXTRA_COLUMNS_EXPRESSION_FOR_TABLE}, * FROM system.${table} " || continue done ) + +function stop_logs_replication +{ + echo "Detach all logs replication" + clickhouse-client --query "select database||'.'||table from system.tables where database = 'system' and (table like '%_sender' or table like '%_watcher')" | { + tee /dev/stderr + } | { + xargs -n1 -r -i clickhouse-client --query "drop table {}" + } +} diff --git a/docker/test/fasttest/run.sh b/docker/test/fasttest/run.sh index 1b72dab5e3c..d3695ba2613 100755 --- a/docker/test/fasttest/run.sh +++ b/docker/test/fasttest/run.sh @@ -206,7 +206,7 @@ function build ( cd "$FASTTEST_BUILD" TIMEFORMAT=$'\nreal\t%3R\nuser\t%3U\nsys\t%3S' - ( time ninja clickhouse-bundle) |& ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/build_log.txt" + ( time ninja clickhouse-bundle clickhouse-stripped) |& ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/build_log.txt" BUILD_SECONDS_ELAPSED=$(awk '/^....-..-.. ..:..:.. real\t[0-9]/ {print $4}' < "$FASTTEST_OUTPUT/build_log.txt") echo "build_clickhouse_fasttest_binary: [ OK ] $BUILD_SECONDS_ELAPSED sec." 
\ | ts '%Y-%m-%d %H:%M:%S' \ @@ -215,7 +215,6 @@ function build mkdir -p "$FASTTEST_OUTPUT/binaries/" cp programs/clickhouse "$FASTTEST_OUTPUT/binaries/clickhouse" - strip programs/clickhouse -o programs/clickhouse-stripped zstd --threads=0 programs/clickhouse-stripped -o "$FASTTEST_OUTPUT/binaries/clickhouse-stripped.zst" fi ccache_status diff --git a/docker/test/fuzzer/generate-test-j2.py b/docker/test/fuzzer/generate-test-j2.py index 11525163ed8..6fd37d6bd02 100755 --- a/docker/test/fuzzer/generate-test-j2.py +++ b/docker/test/fuzzer/generate-test-j2.py @@ -3,6 +3,7 @@ from argparse import ArgumentParser import os import jinja2 +import itertools def removesuffix(text, suffix): @@ -47,6 +48,7 @@ def main(args): loader=jinja2.FileSystemLoader(suite_dir), keep_trailing_newline=True, ) + j2env.globals.update(product=itertools.product) test_names = os.listdir(suite_dir) for test_name in test_names: diff --git a/docker/test/fuzzer/query-fuzzer-tweaks-users.xml b/docker/test/fuzzer/query-fuzzer-tweaks-users.xml index ecd7aae2e4a..023f257253a 100644 --- a/docker/test/fuzzer/query-fuzzer-tweaks-users.xml +++ b/docker/test/fuzzer/query-fuzzer-tweaks-users.xml @@ -23,11 +23,6 @@ 10G - - - - - 200 diff --git a/docker/test/fuzzer/run-fuzzer.sh b/docker/test/fuzzer/run-fuzzer.sh index af1ce0c4dd4..8aeb06ec27b 100755 --- a/docker/test/fuzzer/run-fuzzer.sh +++ b/docker/test/fuzzer/run-fuzzer.sh @@ -212,11 +212,11 @@ quit gdb -batch -command script.gdb -p $server_pid & sleep 5 - # gdb will send SIGSTOP, spend some time loading debug info and then send SIGCONT, wait for it (up to send_timeout, 300s) + # gdb will send SIGSTOP, spend some time loading debug info, and then send SIGCONT, wait for it (up to send_timeout, 300s) time clickhouse-client --query "SELECT 'Connected to clickhouse-server after attaching gdb'" ||: # Check connectivity after we attach gdb, because it might cause the server - # to freeze and the fuzzer will fail. In debug build it can take a lot of time. + # to freeze, and the fuzzer will fail. In debug build, it can take a lot of time. for _ in {1..180} do if clickhouse-client --query "select 1" @@ -226,14 +226,15 @@ quit sleep 1 done kill -0 $server_pid # This checks that it is our server that is started and not some other one - echo 'Server started and responded' + echo 'Server started and responded.' setup_logs_replication # SC2012: Use find instead of ls to better handle non-alphanumeric filenames. They are all alphanumeric. - # SC2046: Quote this to prevent word splitting. Actually I need word splitting. + # SC2046: Quote this to prevent word splitting. Actually, I need word splitting. # shellcheck disable=SC2012,SC2046 timeout -s TERM --preserve-status 30m clickhouse-client \ + --max_memory_usage_in_client=1000000000 \ --receive_timeout=10 \ --receive_data_timeout_ms=10000 \ --stacktrace \ @@ -253,10 +254,10 @@ quit wait "$fuzzer_pid" || fuzzer_exit_code=$? echo "Fuzzer exit code is $fuzzer_exit_code" - # If the server dies, most often the fuzzer returns code 210: connetion + # If the server dies, most often the fuzzer returns Code 210: Connetion # refused, and sometimes also code 32: attempt to read after eof. For - # simplicity, check again whether the server is accepting connections, using - # clickhouse-client. We don't check for existence of server process, because + # simplicity, check again whether the server is accepting connections using + # clickhouse-client. 
We don't check for the existence of the server process, because # the process is still present while the server is terminating and not # accepting the connections anymore. diff --git a/docker/test/integration/mysql_java_client/MySQLJavaClientTest.java b/docker/test/integration/mysql_java_client/MySQLJavaClientTest.java index 1ac21ffe4b4..445e384ba1a 100644 --- a/docker/test/integration/mysql_java_client/MySQLJavaClientTest.java +++ b/docker/test/integration/mysql_java_client/MySQLJavaClientTest.java @@ -39,8 +39,7 @@ public class MySQLJavaClientTest { // useServerPrepStmts=true -> COM_STMT_PREPARE + COM_STMT_EXECUTE -> binary // useServerPrepStmts=false -> COM_QUERY -> text - String jdbcUrl = String.format("jdbc:mysql://%s:%s/%s?useSSL=false&useServerPrepStmts=%s", - host, port, database, binary); + String jdbcUrl = String.format("jdbc:mysql://%s:%s/%s?useSSL=false&useServerPrepStmts=%s", host, port, database, binary); try { Class.forName("com.mysql.cj.jdbc.Driver"); @@ -67,21 +66,21 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %d\n", getMysqlType(rs, "i8"), rs.getInt("i8")); - System.out.printf("%s, value: %d\n", getMysqlType(rs, "i16"), rs.getInt("i16")); - System.out.printf("%s, value: %d\n", getMysqlType(rs, "i32"), rs.getInt("i32")); - System.out.printf("%s, value: %d\n", getMysqlType(rs, "i64"), rs.getLong("i64")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "i128"), rs.getString("i128")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "i256"), rs.getString("i256")); - System.out.printf("%s, value: %d\n", getMysqlType(rs, "ui8"), rs.getInt("ui8")); - System.out.printf("%s, value: %d\n", getMysqlType(rs, "ui16"), rs.getInt("ui16")); - System.out.printf("%s, value: %d\n", getMysqlType(rs, "ui32"), rs.getLong("ui32")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "ui64"), rs.getString("ui64")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "ui128"), rs.getString("ui128")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "ui256"), rs.getString("ui256")); - System.out.printf("%s, value: %f\n", getMysqlType(rs, "f32"), rs.getFloat("f32")); - System.out.printf("%s, value: %f\n", getMysqlType(rs, "f64"), rs.getFloat("f64")); - System.out.printf("%s, value: %b\n", getMysqlType(rs, "b"), rs.getBoolean("b")); + System.out.printf("%s, value: %d, wasNull: %b\n", getMysqlType(rs, "i8"), rs.getInt("i8"), rs.wasNull()); + System.out.printf("%s, value: %d, wasNull: %b\n", getMysqlType(rs, "i16"), rs.getInt("i16"), rs.wasNull()); + System.out.printf("%s, value: %d, wasNull: %b\n", getMysqlType(rs, "i32"), rs.getInt("i32"), rs.wasNull()); + System.out.printf("%s, value: %d, wasNull: %b\n", getMysqlType(rs, "i64"), rs.getLong("i64"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "i128"), rs.getString("i128"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "i256"), rs.getString("i256"), rs.wasNull()); + System.out.printf("%s, value: %d, wasNull: %b\n", getMysqlType(rs, "ui8"), rs.getInt("ui8"), rs.wasNull()); + System.out.printf("%s, value: %d, wasNull: %b\n", getMysqlType(rs, "ui16"), rs.getInt("ui16"), rs.wasNull()); + System.out.printf("%s, value: %d, wasNull: %b\n", getMysqlType(rs, "ui32"), rs.getLong("ui32"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "ui64"), rs.getString("ui64"), rs.wasNull()); + System.out.printf("%s, value: %s, 
wasNull: %b\n", getMysqlType(rs, "ui128"), rs.getString("ui128"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "ui256"), rs.getString("ui256"), rs.wasNull()); + System.out.printf("%s, value: %f, wasNull: %b\n", getMysqlType(rs, "f32"), rs.getFloat("f32"), rs.wasNull()); + System.out.printf("%s, value: %f, wasNull: %b\n", getMysqlType(rs, "f64"), rs.getFloat("f64"), rs.wasNull()); + System.out.printf("%s, value: %b, wasNull: %b\n", getMysqlType(rs, "b"), rs.getBoolean("b"), rs.wasNull()); } System.out.println(); } @@ -92,10 +91,10 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "s"), rs.getString("s")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "sn"), rs.getString("sn")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "lc"), rs.getString("lc")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "nlc"), rs.getString("nlc")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "s"), rs.getString("s"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "sn"), rs.getString("sn"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "lc"), rs.getString("lc"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "nlc"), rs.getString("nlc"), rs.wasNull()); } System.out.println(); } @@ -106,10 +105,10 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "ilc"), rs.getInt("ilc")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dlc"), rs.getDate("dlc")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "ilc"), rs.getInt("ilc"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dlc"), rs.getDate("dlc"), rs.wasNull()); // NULL int is represented as zero - System.out.printf("%s, value: %s\n", getMysqlType(rs, "ni"), rs.getInt("ni")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "ni"), rs.getInt("ni"), rs.wasNull()); } System.out.println(); } @@ -120,12 +119,11 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d32"), rs.getBigDecimal("d32").toPlainString()); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d64"), rs.getBigDecimal("d64").toPlainString()); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d128_native"), - rs.getBigDecimal("d128_native").toPlainString()); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d128_text"), rs.getString("d128_text")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d256"), rs.getString("d256")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d32"), rs.getBigDecimal("d32").toPlainString(), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d64"), rs.getBigDecimal("d64").toPlainString(), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d128_native"), rs.getBigDecimal("d128_native").toPlainString(), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d128_text"), rs.getString("d128_text"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d256"), 
rs.getString("d256"), rs.wasNull()); } System.out.println(); } @@ -136,12 +134,12 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d"), rs.getDate("d")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d32"), rs.getDate("d32")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt"), rs.getTimestamp("dt")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_3"), rs.getTimestamp("dt64_3")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_6"), rs.getTimestamp("dt64_6")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_9"), rs.getTimestamp("dt64_9")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d"), rs.getDate("d"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d32"), rs.getDate("d32"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt"), rs.getTimestamp("dt"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_3"), rs.getTimestamp("dt64_3"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_6"), rs.getTimestamp("dt64_6"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_9"), rs.getTimestamp("dt64_9"), rs.wasNull()); } System.out.println(); } @@ -152,13 +150,13 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_0"), rs.getTimestamp("dt64_0")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_1"), rs.getTimestamp("dt64_1")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_2"), rs.getTimestamp("dt64_2")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_4"), rs.getTimestamp("dt64_4")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_5"), rs.getTimestamp("dt64_5")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_7"), rs.getTimestamp("dt64_7")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_8"), rs.getTimestamp("dt64_8")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_0"), rs.getTimestamp("dt64_0"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_1"), rs.getTimestamp("dt64_1"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_2"), rs.getTimestamp("dt64_2"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_4"), rs.getTimestamp("dt64_4"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_5"), rs.getTimestamp("dt64_5"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_7"), rs.getTimestamp("dt64_7"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_8"), rs.getTimestamp("dt64_8"), rs.wasNull()); } System.out.println(); } @@ -169,8 +167,8 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt"), rs.getTimestamp("dt")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt64_3"), rs.getTimestamp("dt64_3")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt"), rs.getTimestamp("dt"), 
rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt64_3"), rs.getTimestamp("dt64_3"), rs.wasNull()); } System.out.println(); } @@ -181,10 +179,10 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "a"), rs.getString("a")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "u"), rs.getString("u")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "t"), rs.getString("t")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "m"), rs.getString("m")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "a"), rs.getString("a"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "u"), rs.getString("u"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "t"), rs.getString("t"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "m"), rs.getString("m"), rs.wasNull()); } System.out.println(); } @@ -196,17 +194,15 @@ public class MySQLJavaClientTest { int rowNum = 1; while (rs.next()) { System.out.printf("Row #%d\n", rowNum++); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "f"), rs.getFloat("f")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "d"), rs.getDate("d")); - System.out.printf("%s, value: %s\n", getMysqlType(rs, "dt"), rs.getTimestamp("dt")); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "f"), rs.getFloat("f"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "d"), rs.getDate("d"), rs.wasNull()); + System.out.printf("%s, value: %s, wasNull: %b\n", getMysqlType(rs, "dt"), rs.getTimestamp("dt"), rs.wasNull()); } System.out.println(); } private static String getMysqlType(ResultSet rs, String columnLabel) throws SQLException { ResultSetMetaData meta = rs.getMetaData(); - return String.format("%s type is %s", columnLabel, - MysqlType.getByJdbcType(meta.getColumnType(rs.findColumn(columnLabel)))); + return String.format("%s type is %s", columnLabel, MysqlType.getByJdbcType(meta.getColumnType(rs.findColumn(columnLabel)))); } - } diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 8345e3d5791..458ca2b1da8 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -68,6 +68,7 @@ RUN python3 -m pip install --no-cache-dir \ asyncio \ avro==1.10.2 \ azure-storage-blob \ + boto3 \ cassandra-driver \ confluent-kafka==1.9.2 \ delta-spark==2.3.0 \ diff --git a/docker/test/integration/runner/compose/docker_compose_azurite.yml b/docker/test/integration/runner/compose/docker_compose_azurite.yml index 430ea0d9d14..7c379a971ea 100644 --- a/docker/test/integration/runner/compose/docker_compose_azurite.yml +++ b/docker/test/integration/runner/compose/docker_compose_azurite.yml @@ -4,10 +4,10 @@ services: azurite1: image: mcr.microsoft.com/azure-storage/azurite ports: - - "10000:10000" + - "${AZURITE_PORT}:${AZURITE_PORT}" volumes: - data1-1:/data1 - command: azurite-blob --blobHost 0.0.0.0 --blobPort 10000 --debug /azurite_log + command: azurite-blob --blobHost 0.0.0.0 --blobPort ${AZURITE_PORT} --debug /azurite_log volumes: data1-1: diff --git a/docker/test/integration/runner/compose/docker_compose_rabbitmq.yml b/docker/test/integration/runner/compose/docker_compose_rabbitmq.yml index 2db9fb589d2..61b21e0e3d9 100644 --- 
a/docker/test/integration/runner/compose/docker_compose_rabbitmq.yml +++ b/docker/test/integration/runner/compose/docker_compose_rabbitmq.yml @@ -6,9 +6,13 @@ services: hostname: rabbitmq1 expose: - ${RABBITMQ_PORT:-5672} + - ${RABBITMQ_SECURE_PORT:-5671} volumes: - type: ${RABBITMQ_LOGS_FS:-tmpfs} source: ${RABBITMQ_LOGS:-} target: /rabbitmq_logs/ - "${RABBITMQ_COOKIE_FILE}:/var/lib/rabbitmq/.erlang.cookie" - - /misc/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf \ No newline at end of file + - /misc/rabbitmq/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf + - /misc/rabbitmq/ca-cert.pem:/etc/rabbitmq/ca-cert.pem + - /misc/rabbitmq/server-cert.pem:/etc/rabbitmq/server-cert.pem + - /misc/rabbitmq/server-key.pem:/etc/rabbitmq/server-key.pem diff --git a/docker/test/integration/runner/misc/rabbitmq.conf b/docker/test/integration/runner/misc/rabbitmq.conf deleted file mode 100644 index 3527c83880b..00000000000 --- a/docker/test/integration/runner/misc/rabbitmq.conf +++ /dev/null @@ -1,8 +0,0 @@ -loopback_users.guest = false -listeners.tcp.default = 5672 -default_pass = clickhouse -default_user = root -management.tcp.port = 15672 - -log.file = /rabbitmq_logs/rabbit.log -log.file.level = debug diff --git a/docker/test/integration/runner/misc/rabbitmq/ca-cert.pem b/docker/test/integration/runner/misc/rabbitmq/ca-cert.pem new file mode 100644 index 00000000000..4a7b88f7936 --- /dev/null +++ b/docker/test/integration/runner/misc/rabbitmq/ca-cert.pem @@ -0,0 +1,32 @@ +-----BEGIN CERTIFICATE----- +MIIFhTCCA22gAwIBAgIUWhfjFfbwannH3KIqITDtgcvSItMwDQYJKoZIhvcNAQEL +BQAwUjELMAkGA1UEBhMCUlUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDELMAkGA1UEAwwCY2EwHhcNMjMxMTE0 +MTgyODI2WhcNMzMxMTExMTgyODI2WjBSMQswCQYDVQQGEwJSVTETMBEGA1UECAwK +U29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMQsw +CQYDVQQDDAJjYTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAJfJegdC +gavNGYzSdva+5QMxGvqyLwZzjophMeyEzlW/Di4KFGPho+fVlVMB/EwaTRoBRLEu +SQusQwoFg71mGvUTOpgHzlsUz4vcVVFOiL4bJdzCWQKzdC8M8rUFoks9FMboVeSx +jhAnKAm/NpCLpm9VYnRjEq2KEbJp7VkPAHgZEXR7VABwCFvmDcztrfcWfmXxm6IH +o+AkF/nqdphLu7Q1yDQiF8Q8TuszuhqgQ7/1PrRcaSADrF15jJjQb05sILpGCT3e +lxJYId5RF0+fgTIqy03bAKB53+8V8cAkowI4rvPTmcFXhcG3rkDO6lyZixHhlpKi +PmXEzHh0kfsRjzkNBP0CKqPnu3D2iymROiPAH2cteaYe6jdD2HIjuVLk/TjX1ZFy +DlZCrJIwj0l8A2xAfLq8Gw5RSr0a9k5TiMD5nZtfd12Vd0K82vO32vmcjO2Igddc +VWccDDwUY/ZWV3uznkusOBrB8wba3ZsXA5hjJzs0KlTvQKPjX0y4lFMmZGbelwjt +pR5dRNLi5XTdMPzV0mAnvJhDTFEmME19Bh6AEsjuAz3gHUdwNTbSxUS3mF/hTL9k +v2wh5udUAOwqD1uEzqPJyG4JCJQozIDOEEZVixWqQ60b9wUHN8meqO4y9fxTdmHW +Vo5BAF1xEJhJJb0QY/O6GahPtWqb/Mr1rtPJAgMBAAGjUzBRMB0GA1UdDgQWBBSw +fQcOabXwX/v9F1hd2cmuIug56jAfBgNVHSMEGDAWgBSwfQcOabXwX/v9F1hd2cmu +Iug56jAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQAms8y6RVxl +mKSUbsU8JscYwOzcRUQJWETeIr4rtZvMHH+3vkdBU0yKxGpEm7U8J3+5oVTYPhbs +11ZAL+DvIZ6gT6pjDvECyVox1OkjNogz843fTMbNqjuuehjSKXwpMTy5/kmT2aLj +//nBi5UX1xo3RQ9vtmBwzZ3VFK99DFXraDOPS/yk43WV2uqdWsXCNvyEyCHmM1IB +9FQe2EFcO6s4/N+TarhIZ8Udhj5bl8d4eDd1yEckmTD4aHJBgMII2uEwrAxR5CT1 +tCqUKutvNrkXI5PIULvmy+Lwm7PJAC7grPtUHK6anSugpljd7bFj18fHH9APiC45 +Ou4OOK1BUZogCEo7rD36UlanxQO0GEzgDCVEoEdoe0WRdc6T9b4fM8vpQqwBdf9t +nkPB8oLCKerqqYwCiMuWm4BcRmExA7ypIkUCcluGO9/kTmdps3NqOvET9oLTjXuA +z5TPmaK5a3poKLoxBfv6WfRTgisOnMNTsjL1R8+xuhEn5hSlE2r3wAi8Cys9Z9PV +LhTj0SRTXILd2NW3lO8QfO0pGdjgk90GqkyUY9YjuiMVPvdUAFQsHm+0GEZEXjOD +Bw7tLSJQ4IKhfactg/Puxd15ahcWAxeelyED+w/zVGdHYblqbvfdtiGj370KVhoj +DL5HkdPa0IhTPqMBnmoVQ4C/WzKofXBjQQ== +-----END CERTIFICATE----- diff --git a/docker/test/integration/runner/misc/rabbitmq/generate_certs.sh 
b/docker/test/integration/runner/misc/rabbitmq/generate_certs.sh new file mode 100755 index 00000000000..442d2fe004f --- /dev/null +++ b/docker/test/integration/runner/misc/rabbitmq/generate_certs.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# 1. Generate CA's private key and self-signed certificate +openssl req -newkey rsa:4096 -x509 -days 3650 -nodes -batch -keyout ca-key.pem -out ca-cert.pem -subj "/C=RU/ST=Some-State/O=Internet Widgits Pty Ltd/CN=ca" + +# 2. Generate server's private key and certificate signing request (CSR) +openssl req -newkey rsa:4096 -nodes -batch -keyout server-key.pem -out server-req.pem -subj "/C=RU/ST=Some-State/O=Internet Widgits Pty Ltd/CN=server" + +# 3. Use CA's private key to sign server's CSR and get back the signed certificate +openssl x509 -req -days 3650 -in server-req.pem -CA ca-cert.pem -CAkey ca-key.pem -CAcreateserial -extfile server-ext.cnf -out server-cert.pem diff --git a/docker/test/integration/runner/misc/rabbitmq/rabbitmq.conf b/docker/test/integration/runner/misc/rabbitmq/rabbitmq.conf new file mode 100644 index 00000000000..258a282907a --- /dev/null +++ b/docker/test/integration/runner/misc/rabbitmq/rabbitmq.conf @@ -0,0 +1,15 @@ +loopback_users.guest = false +listeners.tcp.default = 5672 +default_pass = clickhouse +default_user = root +management.tcp.port = 15672 + +log.file = /rabbitmq_logs/rabbit.log +log.file.level = debug + +listeners.ssl.default = 5671 +ssl_options.verify = verify_none +ssl_options.fail_if_no_peer_cert = false +ssl_options.cacertfile = /etc/rabbitmq/ca-cert.pem +ssl_options.certfile = /etc/rabbitmq/server-cert.pem +ssl_options.keyfile = /etc/rabbitmq/server-key.pem diff --git a/docker/test/integration/runner/misc/rabbitmq/server-cert.pem b/docker/test/integration/runner/misc/rabbitmq/server-cert.pem new file mode 100644 index 00000000000..338de91aa0f --- /dev/null +++ b/docker/test/integration/runner/misc/rabbitmq/server-cert.pem @@ -0,0 +1,33 @@ +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUJvQslezZO09XgFGQCxOM6orIsWowDQYJKoZIhvcNAQEL +BQAwUjELMAkGA1UEBhMCUlUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDELMAkGA1UEAwwCY2EwHhcNMjMxMTE0 +MTgyODI5WhcNMzMxMTExMTgyODI5WjBWMQswCQYDVQQGEwJSVTETMBEGA1UECAwK +U29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMQ8w +DQYDVQQDDAZzZXJ2ZXIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCe +o/K71WdKpVpdDvhaZy6wBVhFlu7j7DhfTSYvcPpAJfExmzO8JK3vh5/yGyAO1t79 +gAjqyXLMCZKw7ajM2rez9YnGYqaFi70BlTcU2KQ8LbFEYRc3cYNDmmWIKBpwpSri +We5SQrRLnDXqAn6T8FG5ejQ/t+1IUMrtZENB4lp8fBmEOJb5yr1TE++6EhiDBQho +cLDWWWP8b55kyZhqP/VgmId4lvboGMRKxbiRJ6/SPr/i/pteBD8jTYfbJr6ceXov +/p5yxIp61z5ry1anU7W3B8jTl/gj7SqtFdSnRajZ0DGJJAUKpiiJSCSlp5YB5Ub2 +eBBMHmdA5R1MuiU9TOA35nUW5wkhEOJXnBR/WCsYioVmn/+5dm6JPYiwp/TefYnr +x9iLbb/Tyx7MnXzeyvKg781SwmnvS6Blhtr0zhAW9szZz8cVHPBqFs6PzGs/5mwE +C+tM3Zp85aHd28nIT4NQLHdMDwVmGwmPdy4uavtYWMDhsuIyEU8hCZymiHhPnuHU +VbmfZ8GOTIzUgQAvZb0fL1Xow2Tf6XuARnvuU9weRttg9jSOqPuUENRsFXv0mU8M +EpQjrxry88Wfz7bBEjN5JHC16PB/Nu7zTGJ4/slThbxNv0bIONzvTBPbXrKnxw7Z +d9WhGJI+LQxRqLTynQe6yzDwIuW9LRdBNTp7CtQRwQIDAQABo28wbTArBgNVHREE +JDAigiBpbnRlZ3JhdGlvbi10ZXN0cy5jbGlja2hvdXNlLmNvbTAdBgNVHQ4EFgQU +54GvBUYWvMADpTz/zglwMlaJuskwHwYDVR0jBBgwFoAUsH0HDmm18F/7/RdYXdnJ +riLoOeowDQYJKoZIhvcNAQELBQADggIBADfNH6O6ay+xg0XmV6sR0n4j6PwL9Cnc +VjuCmHQbpFXfMvgCdfHvbtT0Y/pG7IoeKmrrm0JPvKa2E9Ht0j6ZnowQ2m9mJk8U +5Fd/PbC1I4KgVCw6HRSOcwqANJxOGe7RyN9PTZZ8fxzmzIR3FiQ2bXfr+LaotZOK +aVS8F8xCOzoMvL9LFls2YpEn20p/1EATIf2MFX3j9vKfcJVOyDJV4i5BMImStFLM 
+g3sdC96de/59yxt9khM0PNucU1ldNFs/kZVEcNSwGOAIgQEPwULJtDY+ZSWeROpX +EpWndN6zQsv1pdNvLtXsDXfi4YoH9QVaA/k4aFFJ08CjSZfMYmwyPOGsf/wqT65i +ADID2yb1A/FIIe/fM+d2gXHBVFBDmydJ1JCdCoYrEJgfWj1LO/0jLi34ZZ17Hu7F +D33fLARF9nlLzlUiWjcQlOjNoCM48AgG/3wHk4eiSfc/3PIJDuDGDa0NdtDeKKhH +XkP2ll4cMUH6EQ9KO1jHPmf5RokX4QJgH+ofO4U5XQFwc3lOyJzEQnED+wame7do +R7TE4F/OXhxLqA6DFkzXe89/kSCoAF9bjzmUn/ilrg8NXKKgprgHg4DJHgvCQVVC +34ab7Xj7msUm4D9vI+GAeUbUqnqCaWxDF6vCMT0Qq7iSVDxa/SV8TX8Vp2Zh+PSh +4m23Did+KjLq +-----END CERTIFICATE----- diff --git a/docker/test/integration/runner/misc/rabbitmq/server-ext.cnf b/docker/test/integration/runner/misc/rabbitmq/server-ext.cnf new file mode 100644 index 00000000000..49859873222 --- /dev/null +++ b/docker/test/integration/runner/misc/rabbitmq/server-ext.cnf @@ -0,0 +1 @@ +subjectAltName=DNS:integration-tests.clickhouse.com diff --git a/docker/test/integration/runner/misc/rabbitmq/server-key.pem b/docker/test/integration/runner/misc/rabbitmq/server-key.pem new file mode 100644 index 00000000000..92e93e8fba5 --- /dev/null +++ b/docker/test/integration/runner/misc/rabbitmq/server-key.pem @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCeo/K71WdKpVpd +DvhaZy6wBVhFlu7j7DhfTSYvcPpAJfExmzO8JK3vh5/yGyAO1t79gAjqyXLMCZKw +7ajM2rez9YnGYqaFi70BlTcU2KQ8LbFEYRc3cYNDmmWIKBpwpSriWe5SQrRLnDXq +An6T8FG5ejQ/t+1IUMrtZENB4lp8fBmEOJb5yr1TE++6EhiDBQhocLDWWWP8b55k +yZhqP/VgmId4lvboGMRKxbiRJ6/SPr/i/pteBD8jTYfbJr6ceXov/p5yxIp61z5r +y1anU7W3B8jTl/gj7SqtFdSnRajZ0DGJJAUKpiiJSCSlp5YB5Ub2eBBMHmdA5R1M +uiU9TOA35nUW5wkhEOJXnBR/WCsYioVmn/+5dm6JPYiwp/TefYnrx9iLbb/Tyx7M +nXzeyvKg781SwmnvS6Blhtr0zhAW9szZz8cVHPBqFs6PzGs/5mwEC+tM3Zp85aHd +28nIT4NQLHdMDwVmGwmPdy4uavtYWMDhsuIyEU8hCZymiHhPnuHUVbmfZ8GOTIzU +gQAvZb0fL1Xow2Tf6XuARnvuU9weRttg9jSOqPuUENRsFXv0mU8MEpQjrxry88Wf +z7bBEjN5JHC16PB/Nu7zTGJ4/slThbxNv0bIONzvTBPbXrKnxw7Zd9WhGJI+LQxR +qLTynQe6yzDwIuW9LRdBNTp7CtQRwQIDAQABAoICAA0lev0T3z5xW36wueYL/PN7 +TehebKeYsMc9BngR/bsJKea5fN0PkRZzf865brusFMifLp3+WbQM6wocd8uaKHUS +WPuGu1P/04bpDap9lYajJriK7ziaAI2+osFYyXAiT954I2bPvk8xv8oHsOOjm7Iq +LWBGZrSCdX6cu3IfRu5f/mFVqzVCFtRmp4wc6ckZxquZAx6QQ9fsjAzAJBBSAoyh +t0BICmgLfWDQ582no0tiBdbS0J9G7NCJIUQI/uzKqFSH3iuWm/84DSUzsZemOT3U +uFDInDil885qK7g87pQ2S5SY1o4eXOebgeX0cFrx3CKaqocUUewv0HDGUEW3NDFs +KhUvlJZIFgk6bMend16U6kfRCUsjLA22Rfxzanl53cGVywCeIMirnLYuEu0TsxyK +CblBvyhcpjrGi7FQskzR+J9LpZPnmtn6TAb7JCAALRVHcAGKhGeh613SjPUfkWb0 +KpDps08x8MWGEAALuHbOK0nMLFm+PuMt7+krqCeJET+XM44GT+6ZstrDv0RufxUN ++pkLW7AsVZoXcFvaOWjuyBvX/f6UHCSfueo0mB3H80WoftDIfdhM+AI7/oBTYCBx +Z8BtW+g7Eq3pOUg/Um7S7Z2bybBWE14kpi95gRf3upEYPqHJUpJPdu20lk24iAt9 +LCXF4AjZBIdAuyJrYOJBAoIBAQDd/Bm14WvmBOablGLn6hmohi6M75D+/eQanlg9 +eJhXJUVd8FzOTjKi70EHWvkqswenNDbe/WGtImqG+9G+N/ol2qhi5xVSQ2XQmcVQ +U+k15Bzm9xKM0OqsStFvRgP1Cy6Ms3/jxr5JEEwUepmjvWTDGTlhTQASA/D7Uh2q +5HpPiHEVm4g5eTAYWeAbI6cGwVS0L4y6xkFGde37Kh2P8ZodWB+d3fglVu4Ok9Nf +wE2f8MK2ewQ0SbF/Nj2WjlVomvOvOJG/2CDLuiH/vc4YUvLAm8pNwvsmgtSh1Okt +E/HfXegrlPPEgw6owqoQFt+aGUITgEhiwEVAcYS0pXzzkQX5AoIBAQC28wJ8ueKr +fINpJM2pSc7WRDFduP5yGsRreSLBXLKMbvOlIVb3PaWp11Cg3+X5O90bPXYJ9mBI +WGR0g14/VD8edxs2D5TUZcP4/vKXGHaWRY9Z4A3jVpjzAxAaviNDHJ08tLXEMXZQ +lbA7dX8z6lpoQfwnPzjBwB01mVegwXPeIwIIfT/FmAiGzvSnAMXBGSGWRRdzof0M +/vPFbgllcQmM4AnEGcErCgFRpwcssO87T2jnvf6QVE5JCcnUcGIli1ThxCU9TRZM +5s6R7Nvk3/UjwcpRcqMtnGpTT2QXSnRwvWUfM+bKTwaxz4PjqKpgIc11kwJAjlxk +4CxYf1mDGLwJAoIBAGFJRTNS8ejDKRXyOE6PaGNVOz2FGLTILJoF34JBQfKfYQFE +gEfiOYry9Dr3AdBW2fnLhmi//3jTZoB2CHwnKDhC1h1STSPaadq8KZ+ExuZZbNlE +WxrfzJlpyNPNiZpxJht/54K57Vc0D0PCX2dFb82ZVm5wQqGinJBocpwcugX1NCpW 
+GaOmmw9xBCigvWjWffriA/kvPhhVQtEaqg4Vwoctwd18FG645Gf7HV4Pd3WrHIrA +6xzHV0T7To6XHpNTpYybbDT50ZW3o4LjellqsPz8yfK+izdbizjJiM+6t/w+uauw +Ag2Tqm8HsWSPwbtVaoIFbLPqs+8EUTaieFp+qnECggEAVuaTdd9uFfrtCNKchh8z +CoAV2uj2pAim6E3//k0j2qURQozVnFdCC6zk9aWkvYB8BGZrXUwUbAjgnp+P8xD3 +cmctG77G+STls66WWMMcAUFFWHGe5y/JMxVvXuSWJ1i+L4m/FVRRWPHhZjznkSdu +jjtZpOLY+N9igIU4JHn/qbKDUrj7w8X1tuMzPuiVBqYDWDe1bg2x/6xS6qLb/71z +xeDdgrKhGOqFud1XARmCaW/M6tdKxg/lp7fokOpZFHBcf2kGL1ogj6LK2HHj+ZGQ +Bc4VZh7H9/BmaPA7IP0S1kKAeBPVOp/TFD737Pm/BC7KQ2DzHusAZEI/jkHfqO/k +0QKCAQEAuiYLn9iLgk4uQO9oaSBGWKrJsR2L2dqI7IWU0X9xJlsQrJKcEeWg4LXt +djLsz0HrxZV/c+Pnh79hmFlBoEmH+hz32D/xd+/qrwwAcMkHAwMbznJu0IIuW2O9 +Uzma++7SvVmr9H0DkUwXFP3jn1A2n3uuI4czqtQ8N7GiH0UAWR5CsIP7azHvZTSj +s4Fzf8rTE6pNqVgQXjrVbI9H/h0uPP4alJbhnPba9mgB1cGmfBEnPkKgYNqSZse+ +95G2TlcK74sKBUSdBKqYBZ4ZUeTXV974Nva9guE9vzDQt1Cj6k0HWISVPUshPzIh +qrdHdxcM6yhA0Z0Gu6zj+Zsy4lU8gA== +-----END PRIVATE KEY----- diff --git a/docker/test/performance-comparison/compare.sh b/docker/test/performance-comparison/compare.sh index 7d6de732489..f10236b7135 100755 --- a/docker/test/performance-comparison/compare.sh +++ b/docker/test/performance-comparison/compare.sh @@ -189,6 +189,8 @@ function run_tests test_prefix=right/performance fi + run_only_changed_tests=0 + # Determine which tests to run. if [ -v CHPC_TEST_GREP ] then @@ -203,6 +205,7 @@ function run_tests # tests. The lists of changed files are prepared in entrypoint.sh because # it has the repository. test_files=($(sed "s/tests\/performance/${test_prefix//\//\\/}/" changed-test-definitions.txt)) + run_only_changed_tests=1 else # The default -- run all tests found in the test dir. test_files=($(ls "$test_prefix"/*.xml)) @@ -226,6 +229,13 @@ function run_tests test_files=("${test_files[@]}") fi + if [ "$run_only_changed_tests" -ne 0 ]; then + if [ ${#test_files[@]} -eq 0 ]; then + time "$script_dir/report.py" --no-tests-run > report.html + exit 0 + fi + fi + # For PRs w/o changes in test definitons, test only a subset of queries, # and run them less times. If the corresponding environment variables are # already set, keep those values. diff --git a/docker/test/performance-comparison/config/users.d/perf-comparison-tweaks-users.xml b/docker/test/performance-comparison/config/users.d/perf-comparison-tweaks-users.xml index cb591f1a184..e780a99ecde 100644 --- a/docker/test/performance-comparison/config/users.d/perf-comparison-tweaks-users.xml +++ b/docker/test/performance-comparison/config/users.d/perf-comparison-tweaks-users.xml @@ -34,9 +34,4 @@ 0 - - - 1 - - diff --git a/docker/test/performance-comparison/report.py b/docker/test/performance-comparison/report.py index 7da30ba7a08..c2bc773bd54 100755 --- a/docker/test/performance-comparison/report.py +++ b/docker/test/performance-comparison/report.py @@ -19,6 +19,7 @@ parser.add_argument( choices=["main", "all-queries"], help="Which report to build", ) +parser.add_argument("--no-tests-run", action="store_true", default=False) args = parser.parse_args() tables = [] @@ -354,6 +355,36 @@ if args.report == "main": add_tested_commits() + def print_status(status, message): + print( + ( + """ + + + """.format( + status=status, message=message + ) + ) + ) + + if args.no_tests_run: + for t in tables: + print(t) + print( + "

No tests to run. Only changed tests were run, but all changed tests are from another batch.

" + ) + print( + f""" + + {os.getenv("CHPC_ADD_REPORT_LINKS") or ''} + + + """ + ) + # Why failure? Because otherwise we will not notice if we have a bug that leads to 0 tests being run + print_status("failure", "No tests changed, nothing to run") + exit(0) + run_error_rows = tsvRows("run-errors.tsv") error_tests += len(run_error_rows) addSimpleTable("Run Errors", ["Test", "Error"], run_error_rows) @@ -646,16 +677,7 @@ if args.report == "main": status = "failure" message = "Errors while building the report." - print( - ( - """ - - - """.format( - status=status, message=message - ) - ) - ) + print_status(status, message) elif args.report == "all-queries": print((header_template.format())) diff --git a/docker/test/stateless/run.sh b/docker/test/stateless/run.sh index 34fc12d1a72..07b40ea3b3d 100755 --- a/docker/test/stateless/run.sh +++ b/docker/test/stateless/run.sh @@ -19,6 +19,11 @@ dpkg -i package_folder/clickhouse-common-static-dbg_*.deb dpkg -i package_folder/clickhouse-server_*.deb dpkg -i package_folder/clickhouse-client_*.deb +# Check that the tools are available under short names +ch --query "SELECT 1" || exit 1 +chl --query "SELECT 1" || exit 1 +chc --version || exit 1 + ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test # shellcheck disable=SC1091 @@ -62,7 +67,7 @@ if [ "$NUM_TRIES" -gt "1" ]; then export THREAD_FUZZER_pthread_mutex_unlock_AFTER_SLEEP_TIME_US=10000 mkdir -p /var/run/clickhouse-server - # simpliest way to forward env variables to server + # simplest way to forward env variables to server sudo -E -u clickhouse /usr/bin/clickhouse-server --config /etc/clickhouse-server/config.xml --daemon --pid-file /var/run/clickhouse-server/clickhouse-server.pid else sudo clickhouse start @@ -212,6 +217,9 @@ ls -la / clickhouse-client -q "system flush logs" ||: +# stop logs replication to make it possible to dump logs tables via clickhouse-local +stop_logs_replication + # Stop server so we can safely read data with clickhouse-local. # Why do we read data with clickhouse-local? # Because it's the simplest way to read it when server has crashed. 
diff --git a/docker/test/stateless/stress_tests.lib b/docker/test/stateless/stress_tests.lib index e1642517187..8f89c1b80dd 100644 --- a/docker/test/stateless/stress_tests.lib +++ b/docker/test/stateless/stress_tests.lib @@ -55,23 +55,29 @@ function configure() function randomize_config_boolean_value { value=$(($RANDOM % 2)) - sudo cat /etc/clickhouse-server/config.d/keeper_port.xml \ + sudo cat /etc/clickhouse-server/config.d/$2.xml \ | sed "s|<$1>[01]|<$1>$value|" \ - > /etc/clickhouse-server/config.d/keeper_port.xml.tmp - sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml + > /etc/clickhouse-server/config.d/$2.xml.tmp + sudo mv /etc/clickhouse-server/config.d/$2.xml.tmp /etc/clickhouse-server/config.d/$2.xml } if [[ -n "$RANDOMIZE_KEEPER_FEATURE_FLAGS" ]] && [[ "$RANDOMIZE_KEEPER_FEATURE_FLAGS" -eq 1 ]]; then # Randomize all Keeper feature flags - randomize_config_boolean_value filtered_list - randomize_config_boolean_value multi_read - randomize_config_boolean_value check_not_exists - randomize_config_boolean_value create_if_not_exists + randomize_config_boolean_value filtered_list keeper_port + randomize_config_boolean_value multi_read keeper_port + randomize_config_boolean_value check_not_exists keeper_port + randomize_config_boolean_value create_if_not_exists keeper_port fi sudo chown clickhouse /etc/clickhouse-server/config.d/keeper_port.xml sudo chgrp clickhouse /etc/clickhouse-server/config.d/keeper_port.xml + if [[ -n "$ZOOKEEPER_FAULT_INJECTION" ]] && [[ "$ZOOKEEPER_FAULT_INJECTION" -eq 1 ]]; then + randomize_config_boolean_value use_compression zookeeper_fault_injection + else + randomize_config_boolean_value use_compression zookeeper + fi + # for clickhouse-server (via service) echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment # for clickhouse-client @@ -134,21 +140,6 @@ EOL --> $PWD -EOL - - # Analyzer is not yet ready for testing - cat > /etc/clickhouse-server/users.d/no_analyzer.xml < - - - - - - - - - - EOL } @@ -177,6 +168,9 @@ function stop() echo "thread apply all backtrace (on stop)" >> /test_output/gdb.log timeout 30m gdb -batch -ex 'thread apply all backtrace' -p "$pid" | ts '%Y-%m-%d %H:%M:%S' >> /test_output/gdb.log clickhouse stop --force + else + echo -e "Warning: server did not stop yet$OK" >> /test_output/test_results.tsv + clickhouse stop --force fi } diff --git a/docker/test/unit/run.sh b/docker/test/unit/run.sh index e87432214d8..7323c384d9c 100644 --- a/docker/test/unit/run.sh +++ b/docker/test/unit/run.sh @@ -2,5 +2,4 @@ set -x -service zookeeper start && sleep 7 && /usr/share/zookeeper/bin/zkCli.sh -server localhost:2181 -create create /clickhouse_test ''; timeout 40m gdb -q -ex 'set print inferior-events off' -ex 'set confirm off' -ex 'set print thread-events off' -ex run -ex bt -ex quit --args ./unit_tests_dbms --gtest_output='json:test_output/test_result.json' | tee test_output/test_result.txt diff --git a/docker/test/upgrade/run.sh b/docker/test/upgrade/run.sh index c69d90b9af0..57b683a16c3 100644 --- a/docker/test/upgrade/run.sh +++ b/docker/test/upgrade/run.sh @@ -78,6 +78,7 @@ remove_keeper_config "create_if_not_exists" "[01]" rm /etc/clickhouse-server/config.d/merge_tree.xml rm /etc/clickhouse-server/config.d/enable_wait_for_shutdown_replicated_tables.xml rm /etc/clickhouse-server/users.d/nonconst_timezone.xml +rm /etc/clickhouse-server/users.d/s3_cache_new.xml start stop @@ -114,6 +115,7 @@ sudo chgrp clickhouse 
/etc/clickhouse-server/config.d/s3_storage_policy_by_defau rm /etc/clickhouse-server/config.d/merge_tree.xml rm /etc/clickhouse-server/config.d/enable_wait_for_shutdown_replicated_tables.xml rm /etc/clickhouse-server/users.d/nonconst_timezone.xml +rm /etc/clickhouse-server/users.d/s3_cache_new.xml start @@ -189,6 +191,7 @@ rg -Fav -e "Code: 236. DB::Exception: Cancelled merging parts" \ -e "ZooKeeperClient" \ -e "KEEPER_EXCEPTION" \ -e "DirectoryMonitor" \ + -e "DistributedInsertQueue" \ -e "TABLE_IS_READ_ONLY" \ -e "Code: 1000, e.code() = 111, Connection refused" \ -e "UNFINISHED" \ diff --git a/docs/_includes/install/universal.sh b/docs/_includes/install/universal.sh index 0ae77f464eb..d474aa98e76 100755 --- a/docs/_includes/install/universal.sh +++ b/docs/_includes/install/universal.sh @@ -20,9 +20,9 @@ then fi elif [ "${ARCH}" = "aarch64" -o "${ARCH}" = "arm64" ] then - # If the system has >=ARMv8.2 (https://en.wikipedia.org/wiki/AArch64), choose the corresponding build, else fall back to a v8.0 - # compat build. Unfortunately, the ARM ISA level cannot be read directly, we need to guess from the "features" in /proc/cpuinfo. - # Also, the flags in /proc/cpuinfo are named differently than the flags passed to the compiler (cmake/cpu_features.cmake). + # Dispatch between standard and compatibility builds, see cmake/cpu_features.cmake for details. Unfortunately, (1) the ARM ISA level + # cannot be read directly, we need to guess from the "features" in /proc/cpuinfo, and (2) the flags in /proc/cpuinfo are named + # differently than the flags passed to the compiler in cpu_features.cmake. HAS_ARMV82=$(grep -m 1 'Features' /proc/cpuinfo | awk '/asimd/ && /sha1/ && /aes/ && /atomics/ && /lrcpc/') if [ "${HAS_ARMV82}" ] then diff --git a/docs/changelogs/v21.10.2.15-stable.md b/docs/changelogs/v21.10.2.15-stable.md index 42402808260..bff9c352d98 100644 --- a/docs/changelogs/v21.10.2.15-stable.md +++ b/docs/changelogs/v21.10.2.15-stable.md @@ -75,7 +75,7 @@ sidebar_label: 2022 * Fix usage of nested columns with non-array columns with the same prefix [2] [#28762](https://github.com/ClickHouse/ClickHouse/pull/28762) ([Anton Popov](https://github.com/CurtizJ)). * Lower compiled_expression_cache_size to 128MB [#28816](https://github.com/ClickHouse/ClickHouse/pull/28816) ([Maksim Kita](https://github.com/kitaisreal)). * Column default dictGet identifier fix [#28863](https://github.com/ClickHouse/ClickHouse/pull/28863) ([Maksim Kita](https://github.com/kitaisreal)). -* Don not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Do not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). * Merging [#27963](https://github.com/ClickHouse/ClickHouse/issues/27963) [#29063](https://github.com/ClickHouse/ClickHouse/pull/29063) ([Maksim Kita](https://github.com/kitaisreal)). * Fix terminate on uncaught exception [#29216](https://github.com/ClickHouse/ClickHouse/pull/29216) ([Alexander Tokmakov](https://github.com/tavplubix)). * Fix arcadia (pre)stable 21.10 build [#29250](https://github.com/ClickHouse/ClickHouse/pull/29250) ([DimasKovas](https://github.com/DimasKovas)). 
diff --git a/docs/changelogs/v21.3.18.4-lts.md b/docs/changelogs/v21.3.18.4-lts.md index 33f4b86d81c..c97b1bd8fa5 100644 --- a/docs/changelogs/v21.3.18.4-lts.md +++ b/docs/changelogs/v21.3.18.4-lts.md @@ -29,7 +29,7 @@ sidebar_label: 2022 #### NOT FOR CHANGELOG / INSIGNIFICANT -* Don not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Do not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). * Merging [#27963](https://github.com/ClickHouse/ClickHouse/issues/27963) [#29063](https://github.com/ClickHouse/ClickHouse/pull/29063) ([Maksim Kita](https://github.com/kitaisreal)). * May be fix s3 tests [#29762](https://github.com/ClickHouse/ClickHouse/pull/29762) ([Kseniia Sumarokova](https://github.com/kssenii)). * Fix ca-bundle.crt in kerberized_hadoop/Dockerfile [#30358](https://github.com/ClickHouse/ClickHouse/pull/30358) ([Vladimir C](https://github.com/vdimir)). diff --git a/docs/changelogs/v21.7.11.3-stable.md b/docs/changelogs/v21.7.11.3-stable.md index 8ccc31657de..e1fde88d261 100644 --- a/docs/changelogs/v21.7.11.3-stable.md +++ b/docs/changelogs/v21.7.11.3-stable.md @@ -14,6 +14,6 @@ sidebar_label: 2022 #### NOT FOR CHANGELOG / INSIGNIFICANT -* Don not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Do not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). * Merging [#27963](https://github.com/ClickHouse/ClickHouse/issues/27963) [#29063](https://github.com/ClickHouse/ClickHouse/pull/29063) ([Maksim Kita](https://github.com/kitaisreal)). * Fix terminate on uncaught exception [#29216](https://github.com/ClickHouse/ClickHouse/pull/29216) ([Alexander Tokmakov](https://github.com/tavplubix)). diff --git a/docs/changelogs/v21.8.7.22-lts.md b/docs/changelogs/v21.8.7.22-lts.md index 7a751be4132..d09296fdd80 100644 --- a/docs/changelogs/v21.8.7.22-lts.md +++ b/docs/changelogs/v21.8.7.22-lts.md @@ -15,6 +15,6 @@ sidebar_label: 2022 #### NOT FOR CHANGELOG / INSIGNIFICANT -* Don not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Do not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). * Merging [#27963](https://github.com/ClickHouse/ClickHouse/issues/27963) [#29063](https://github.com/ClickHouse/ClickHouse/pull/29063) ([Maksim Kita](https://github.com/kitaisreal)). * Fix terminate on uncaught exception [#29216](https://github.com/ClickHouse/ClickHouse/pull/29216) ([Alexander Tokmakov](https://github.com/tavplubix)). diff --git a/docs/changelogs/v21.9.4.35-stable.md b/docs/changelogs/v21.9.4.35-stable.md index 0b300574559..04d404fba7f 100644 --- a/docs/changelogs/v21.9.4.35-stable.md +++ b/docs/changelogs/v21.9.4.35-stable.md @@ -13,6 +13,6 @@ sidebar_label: 2022 #### NOT FOR CHANGELOG / INSIGNIFICANT -* Don not add const group by key for query with only having. 
[#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Do not add const group by key for query with only having. [#28975](https://github.com/ClickHouse/ClickHouse/pull/28975) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). * Merging [#27963](https://github.com/ClickHouse/ClickHouse/issues/27963) [#29063](https://github.com/ClickHouse/ClickHouse/pull/29063) ([Maksim Kita](https://github.com/kitaisreal)). * Fix terminate on uncaught exception [#29216](https://github.com/ClickHouse/ClickHouse/pull/29216) ([Alexander Tokmakov](https://github.com/tavplubix)). diff --git a/docs/changelogs/v23.10.1.1976-stable.md b/docs/changelogs/v23.10.1.1976-stable.md new file mode 100644 index 00000000000..0e7e7bcd55a --- /dev/null +++ b/docs/changelogs/v23.10.1.1976-stable.md @@ -0,0 +1,406 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.10.1.1976-stable (13adae0e42f) FIXME as compared to v23.9.1.1854-stable (8f9a227de1f) + +#### Backward Incompatible Change
+* Rewrote storage S3Queue completely: changed the way we keep information in ZooKeeper, which allows making fewer ZooKeeper requests; added caching of ZooKeeper state in cases when we know the state will not change; made the polling of the S3 process less aggressive; changed the way the TTL and the max set of tracked files are maintained, now it is a background process. Added `system.s3queue` and `system.s3queue_log` tables. Closes [#54998](https://github.com/ClickHouse/ClickHouse/issues/54998). [#54422](https://github.com/ClickHouse/ClickHouse/pull/54422) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* There is no longer an option to automatically remove broken data parts. This closes [#55174](https://github.com/ClickHouse/ClickHouse/issues/55174). [#55184](https://github.com/ClickHouse/ClickHouse/pull/55184) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* The obsolete in-memory data parts can no longer be read from the write-ahead log. If you have configured in-memory parts before, they have to be removed before the upgrade. [#55186](https://github.com/ClickHouse/ClickHouse/pull/55186) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Remove the integration with Meilisearch. Reason: it was compatible only with the old version 0.18. The recent version of Meilisearch changed the protocol and does not work anymore. Note: we would appreciate it if you help to bring it back. [#55189](https://github.com/ClickHouse/ClickHouse/pull/55189) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Rename the directory monitor concept into background INSERT. All settings `*directory_monitor*` have been renamed to `distributed_background_insert*`. **Backward compatibility should be preserved** (since the old settings had been added as aliases). [#55978](https://github.com/ClickHouse/ClickHouse/pull/55978) ([Azat Khuzhin](https://github.com/azat)).
+* Do not mix up send_timeout and receive_timeout. [#56035](https://github.com/ClickHouse/ClickHouse/pull/56035) ([Azat Khuzhin](https://github.com/azat)).
+* Comparison of time intervals with different units will throw an exception. This closes [#55942](https://github.com/ClickHouse/ClickHouse/issues/55942). You might have occasionally relied on the previous behavior, when the underlying numeric values were compared regardless of the units.
[#56090](https://github.com/ClickHouse/ClickHouse/pull/56090) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### New Feature +* Add function "arrayFold(x1, ..., xn, accum -> expression, array1, ..., arrayn, init_accum)" which applies a lambda function to multiple arrays of the same cardinality and collects the result in an accumulator. [#49794](https://github.com/ClickHouse/ClickHouse/pull/49794) ([Lirikl](https://github.com/Lirikl)). +* Added aggregation function lttb which uses the [Largest-Triangle-Three-Buckets](https://skemman.is/bitstream/1946/15343/3/SS_MSthesis.pdf) algorithm for downsampling data for visualization. [#53145](https://github.com/ClickHouse/ClickHouse/pull/53145) ([Sinan](https://github.com/sinsinan)). +* Query`CHECK TABLE` has better performance and usability (sends progress updates, cancellable). Support checking particular part with `CHECK TABLE ... PART 'part_name'`. [#53404](https://github.com/ClickHouse/ClickHouse/pull/53404) ([vdimir](https://github.com/vdimir)). +* Added function `jsonMergePatch`. When working with JSON data as strings, it provides a way to merge these strings (of JSON objects) together to form a single string containing a single JSON object. [#54364](https://github.com/ClickHouse/ClickHouse/pull/54364) ([Memo](https://github.com/Joeywzr)). +* Added a new SQL function, "arrayRandomSample(arr, k)" which returns a sample of k elements from the input array. Similar functionality could previously be achieved only with less convenient syntax, e.g. "SELECT arrayReduce('groupArraySample(3)', range(10))". [#54391](https://github.com/ClickHouse/ClickHouse/pull/54391) ([itayisraelov](https://github.com/itayisraelov)). +* Added new function `getHttpHeader` to get HTTP request header value used for a request to ClickHouse server. Return empty string if the request is not done over HTTP protocol or there is no such header. [#54813](https://github.com/ClickHouse/ClickHouse/pull/54813) ([凌涛](https://github.com/lingtaolf)). +* Introduce -ArgMin/-ArgMax aggregate combinators which allow to aggregate by min/max values only. One use case can be found in [#54818](https://github.com/ClickHouse/ClickHouse/issues/54818). This PR also reorganize combinators into dedicated folder. [#54947](https://github.com/ClickHouse/ClickHouse/pull/54947) ([Amos Bird](https://github.com/amosbird)). +* Allow to drop cache for Protobuf format with `SYSTEM DROP SCHEMA FORMAT CACHE [FOR Protobuf]`. [#55064](https://github.com/ClickHouse/ClickHouse/pull/55064) ([Aleksandr Musorin](https://github.com/AVMusorin)). +* Add external HTTP Basic authenticator. [#55199](https://github.com/ClickHouse/ClickHouse/pull/55199) ([Aleksei Filatov](https://github.com/aalexfvk)). +* Added function `byteSwap` which reverses the bytes of unsigned integers. This is particularly useful for reversing values of types which are represented as unsigned integers internally such as IPv4. [#55211](https://github.com/ClickHouse/ClickHouse/pull/55211) ([Priyansh Agrawal](https://github.com/Priyansh121096)). +* Added function `formatQuery()` which returns a formatted version (possibly spanning multiple lines) of a SQL query string. Also added function `formatQuerySingleLine()` which does the same but the returned string will not contain linebreaks. [#55239](https://github.com/ClickHouse/ClickHouse/pull/55239) ([Salvatore Mesoraca](https://github.com/aiven-sal)). +* Added DWARF input format that reads debug symbols from an ELF executable/library/object file. 
[#55450](https://github.com/ClickHouse/ClickHouse/pull/55450) ([Michael Kolupaev](https://github.com/al13n321)). +* Allow to save unparsed records and errors in RabbitMQ, NATS and FileLog engines. Add virtual columns `_error` and `_raw_message`(for NATS and RabbitMQ), `_raw_record` (for FileLog) that are filled when ClickHouse fails to parse new record. The behaviour is controlled under storage settings `nats_handle_error_mode` for NATS, `rabbitmq_handle_error_mode` for RabbitMQ, `handle_error_mode` for FileLog similar to `kafka_handle_error_mode`. If it's set to `default`, en exception will be thrown when ClickHouse fails to parse a record, if it's set to `stream`, erorr and raw record will be saved into virtual columns. Closes [#36035](https://github.com/ClickHouse/ClickHouse/issues/36035). [#55477](https://github.com/ClickHouse/ClickHouse/pull/55477) ([Kruglov Pavel](https://github.com/Avogar)). +* Keeper client improvement: add get_all_children_number command that returns number of all children nodes under a specific path. [#55485](https://github.com/ClickHouse/ClickHouse/pull/55485) ([guoxiaolong](https://github.com/guoxiaolongzte)). +* If a table has a space-filling curve in its key, e.g., `ORDER BY mortonEncode(x, y)`, the conditions on its arguments, e.g., `x >= 10 AND x <= 20 AND y >= 20 AND y <= 30` can be used for indexing. A setting `analyze_index_with_space_filling_curves` is added to enable or disable this analysis. This closes [#41195](https://github.com/ClickHouse/ClickHouse/issues/41195). Continuation of [#4538](https://github.com/ClickHouse/ClickHouse/issues/4538). Continuation of [#6286](https://github.com/ClickHouse/ClickHouse/issues/6286). Continuation of [#28130](https://github.com/ClickHouse/ClickHouse/issues/28130). Continuation of [#41753](https://github.com/ClickHouse/ClickHouse/issues/41753). [#55642](https://github.com/ClickHouse/ClickHouse/pull/55642) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Add setting `optimize_trivial_approximate_count_query` to use `count()` approximation for storage EmbeddedRocksDB. Enable trivial count for StorageJoin. [#55806](https://github.com/ClickHouse/ClickHouse/pull/55806) ([Duc Canh Le](https://github.com/canhld94)). +* Keeper client improvement: add get_direct_children_number command that returns number of direct children nodes under a path. [#55898](https://github.com/ClickHouse/ClickHouse/pull/55898) ([xuzifu666](https://github.com/xuzifu666)). +* Add statement `SHOW SETTING setting_name` which is a simpler version of existing statement `SHOW SETTINGS`. [#55979](https://github.com/ClickHouse/ClickHouse/pull/55979) ([Maksim Kita](https://github.com/kitaisreal)). +* This pr gives possibility to pass data in Npy format to Clickhouse. ``` SELECT * FROM file('example_array.npy', Npy). [#55982](https://github.com/ClickHouse/ClickHouse/pull/55982) ([Yarik Briukhovetskyi](https://github.com/yariks5s)). +* This PR impleents a new setting called `force_optimize_projection_name`, it takes a name of projection as an argument. If it's value set to a non-empty string, ClickHouse checks that this projection is used in the query at least once. Closes [#55331](https://github.com/ClickHouse/ClickHouse/issues/55331). [#56134](https://github.com/ClickHouse/ClickHouse/pull/56134) ([Yarik Briukhovetskyi](https://github.com/yariks5s)). 
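To make a few of these entries concrete, here are sketch invocations based on the signatures as described above (the released syntax may differ in details):

```bash
# Fold an array with '+': accumulates 1+2+3+4 on top of the initial 0, i.e. 10.
clickhouse-client --query "SELECT arrayFold((x, acc) -> acc + x, [1, 2, 3, 4], toUInt64(0))"
# Pick 3 random elements from an array.
clickhouse-client --query "SELECT arrayRandomSample(['a', 'b', 'c', 'd', 'e'], 3)"
# Merge two JSON object strings into one.
clickhouse-client --query "SELECT jsonMergePatch('{\"a\": 1}', '{\"b\": 2}')"
# Return a normalised, single-line rendering of a query string.
clickhouse-client --query "SELECT formatQuerySingleLine('select   1   from   numbers(10)')"
```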
+
+#### Performance Improvement
+* Add option `query_plan_preserve_num_streams_after_window_functions` to preserve the number of streams after evaluating window functions to allow parallel stream processing (see the sketch after this list). [#50771](https://github.com/ClickHouse/ClickHouse/pull/50771) ([frinkr](https://github.com/frinkr)).
+* Release more num_streams if data is small. [#53867](https://github.com/ClickHouse/ClickHouse/pull/53867) ([Jiebin Sun](https://github.com/jiebinn)).
+* RoaringBitmaps are now optimized before serialization. [#55044](https://github.com/ClickHouse/ClickHouse/pull/55044) ([UnamedRus](https://github.com/UnamedRus)).
+* Posting lists in inverted indexes are now optimized to use the smallest possible representation for internal bitmaps. Depending on the repetitiveness of the data, this may significantly reduce the space consumption of inverted indexes. [#55069](https://github.com/ClickHouse/ClickHouse/pull/55069) ([Harry Lee](https://github.com/HarryLeeIBM)).
+* Fix contention on the Context lock; this significantly improves performance for many short-running concurrent queries. [#55121](https://github.com/ClickHouse/ClickHouse/pull/55121) ([Maksim Kita](https://github.com/kitaisreal)).
+* Improved the performance of inverted index creation by 30%. This was achieved by replacing `std::unordered_map` with `absl::flat_hash_map`. [#55210](https://github.com/ClickHouse/ClickHouse/pull/55210) ([Harry Lee](https://github.com/HarryLeeIBM)).
+* Support ORC filter push-down (row group level). [#55330](https://github.com/ClickHouse/ClickHouse/pull/55330) ([李扬](https://github.com/taiyang-li)).
+* Improve performance of external aggregation with a lot of temporary files. [#55489](https://github.com/ClickHouse/ClickHouse/pull/55489) ([Maksim Kita](https://github.com/kitaisreal)).
+* Set a reasonable size for the marks cache for secondary indices by default to avoid loading the marks over and over again. [#55654](https://github.com/ClickHouse/ClickHouse/pull/55654) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Avoid unnecessary reconstruction of index granules when reading skip indexes. This addresses [#55653](https://github.com/ClickHouse/ClickHouse/issues/55653#issuecomment-1763766009). [#55683](https://github.com/ClickHouse/ClickHouse/pull/55683) ([Amos Bird](https://github.com/amosbird)).
+* Cache the cast function in the set during execution to improve the performance of function `IN` when the set element type doesn't exactly match the column type. [#55712](https://github.com/ClickHouse/ClickHouse/pull/55712) ([Duc Canh Le](https://github.com/canhld94)).
+* Performance improvement for `ColumnVector::insertMany` and `ColumnVector::insertManyFrom`. [#55714](https://github.com/ClickHouse/ClickHouse/pull/55714) ([frinkr](https://github.com/frinkr)).
+* Getting values from a map is widely used. In practice, the key structures are usually the same within a map column, so we try to predict the next row's key position and reduce the number of comparisons. [#55929](https://github.com/ClickHouse/ClickHouse/pull/55929) ([lgbo](https://github.com/lgbo-ustc)).
+* Fix an issue where struct field pruning didn't work in some cases, for example `INSERT INTO FUNCTION file('test_parquet_struct', Parquet, 'x Tuple(a UInt32, b UInt32, c String)') SELECT tuple(number, rand(), concat('testxxxxxxx', toString(number))) FROM numbers(10)`. [#56117](https://github.com/ClickHouse/ClickHouse/pull/56117) ([lgbo](https://github.com/lgbo-ustc)).
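+
+A minimal sketch showing how the new `query_plan_preserve_num_streams_after_window_functions` option might be enabled for a query with window functions; the `metrics` table and its columns are hypothetical:
+
+```sql
+-- Preserve the number of streams after the window step so that the rest of the
+-- pipeline (e.g. further filtering or aggregation) can keep running in parallel.
+SELECT
+    id,
+    sum(value) OVER (PARTITION BY id ORDER BY ts) AS running_sum
+FROM metrics
+SETTINGS query_plan_preserve_num_streams_after_window_functions = 1;
+```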
+
+#### Improvement
+* This is the second part of Kusto Query Language dialect support. [Phase 1 implementation](https://github.com/ClickHouse/ClickHouse/pull/37961) has been merged. [#42510](https://github.com/ClickHouse/ClickHouse/pull/42510) ([larryluogit](https://github.com/larryluogit)).
+* Processor IDs are raw pointers cast to UInt64; they are now printed in a prettier manner. [#48852](https://github.com/ClickHouse/ClickHouse/pull/48852) ([Vlad Seliverstov](https://github.com/behebot)).
+* Creating a direct dictionary with a lifetime field set will be rejected at create time. Fixes: [#27861](https://github.com/ClickHouse/ClickHouse/issues/27861). [#49043](https://github.com/ClickHouse/ClickHouse/pull/49043) ([Rory Crispin](https://github.com/RoryCrispin)).
+* Allow parameters in queries with partitions like `ALTER TABLE t DROP PARTITION`. Closes [#49449](https://github.com/ClickHouse/ClickHouse/issues/49449). [#49516](https://github.com/ClickHouse/ClickHouse/pull/49516) ([Nikolay Degterinsky](https://github.com/evillique)).
+* 1. Refactor the code around `zookeeper_connection`. 2. Add a new column `xid` to `zookeeper_connection`. [#50702](https://github.com/ClickHouse/ClickHouse/pull/50702) ([helifu](https://github.com/helifu)).
+* Add the ability to tune the number of parallel replicas used in a query execution based on the estimation of rows to read. [#51692](https://github.com/ClickHouse/ClickHouse/pull/51692) ([Raúl Marín](https://github.com/Algunenano)).
+* Distributed queries executed in `async_socket_for_remote` mode (default) now respect the `max_threads` limit. Previously, some queries could create excessive threads (up to `max_distributed_connections`), causing server performance issues. [#53504](https://github.com/ClickHouse/ClickHouse/pull/53504) ([filimonov](https://github.com/filimonov)).
+* Display the correct server settings after reload. [#53774](https://github.com/ClickHouse/ClickHouse/pull/53774) ([helifu](https://github.com/helifu)).
+* Add support for the mathematical minus `−` character in queries, similar to `-`. [#54100](https://github.com/ClickHouse/ClickHouse/pull/54100) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Add replica groups to the Replicated database engine. Closes [#53620](https://github.com/ClickHouse/ClickHouse/issues/53620). [#54421](https://github.com/ClickHouse/ClickHouse/pull/54421) ([Nikolay Degterinsky](https://github.com/evillique)).
+* Fix a UBSan test error, see [here](https://github.com/ClickHouse/ClickHouse/pull/54566). [#54568](https://github.com/ClickHouse/ClickHouse/pull/54568) ([JackyWoo](https://github.com/JackyWoo)).
+* Support asynchronous inserts with external data via native protocol. Previously it worked only if the data was inlined into the query. [#54730](https://github.com/ClickHouse/ClickHouse/pull/54730) ([Anton Popov](https://github.com/CurtizJ)).
+* It is better to retry retriable S3 errors than to totally fail the query. Set a bigger default value for `s3_retry_attempts`. [#54770](https://github.com/ClickHouse/ClickHouse/pull/54770) ([Sema Checherinda](https://github.com/CheSema)).
+* Optimised external aggregation memory consumption in case many temporary files were generated. [#54798](https://github.com/ClickHouse/ClickHouse/pull/54798) ([Nikita Taranov](https://github.com/nickitat)).
+* Add load balancing `test_hostname_levenshtein_distance`. [#54826](https://github.com/ClickHouse/ClickHouse/pull/54826) ([JackyWoo](https://github.com/JackyWoo)).
+* Cache skippable entries while executing DDL from the ZooKeeper distributed DDL queue. [#54828](https://github.com/ClickHouse/ClickHouse/pull/54828) ([Duc Canh Le](https://github.com/canhld94)).
+* Improve hiding secrets in logs. [#55089](https://github.com/ClickHouse/ClickHouse/pull/55089) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Added fields `substreams` and `filenames` to the `system.parts_columns` table. [#55108](https://github.com/ClickHouse/ClickHouse/pull/55108) ([Anton Popov](https://github.com/CurtizJ)).
+* From now on, projection analysis will be performed only on top of the query plan. The setting `query_plan_optimize_projection` became obsolete (it has been enabled by default for a long time). [#55112](https://github.com/ClickHouse/ClickHouse/pull/55112) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+* When function "untuple()" is called on a tuple with named elements and itself has an alias (e.g. "select untuple(tuple(1)::Tuple(element_alias Int)) AS untuple_alias"), the result column name is now generated from the untuple alias and the tuple element alias (in the example: "untuple_alias.element_alias"). [#55123](https://github.com/ClickHouse/ClickHouse/pull/55123) ([garcher22](https://github.com/garcher22)).
+* Added setting `describe_include_virtual_columns`, which allows including virtual columns of a table into the result of a `DESCRIBE` query. Added setting `describe_compact_output`. If it is set to `true`, the `DESCRIBE` query returns only names and types of columns without extra information. [#55129](https://github.com/ClickHouse/ClickHouse/pull/55129) ([Anton Popov](https://github.com/CurtizJ)).
+* Sometimes `OPTIMIZE` with `optimize_throw_if_noop=1` may fail with an error `unknown reason` while the real cause of it is different projections in different parts. This behavior is fixed. [#55130](https://github.com/ClickHouse/ClickHouse/pull/55130) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+* Allow to have several MaterializedPostgreSQL tables following the same Postgres table. By default this behaviour is not enabled (for compatibility, because it is a backward-incompatible change), but it can be turned on with the setting `materialized_postgresql_use_unique_replication_consumer_identifier`. Closes [#54918](https://github.com/ClickHouse/ClickHouse/issues/54918). [#55145](https://github.com/ClickHouse/ClickHouse/pull/55145) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Allow to parse negative DateTime64 and DateTime with fractional part from short strings. [#55146](https://github.com/ClickHouse/ClickHouse/pull/55146) ([Andrey Zvonov](https://github.com/zvonand)).
+* To improve compatibility with MySQL, 1. "information_schema.tables" now includes the new field "table_rows", and 2. "information_schema.columns" now includes the new field "extra". [#55215](https://github.com/ClickHouse/ClickHouse/pull/55215) ([Robert Schulze](https://github.com/rschu1ze)).
+* clickhouse-client won't show "0 rows in set" if it is zero and an exception was thrown. [#55240](https://github.com/ClickHouse/ClickHouse/pull/55240) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
+* Support renaming a table without the keyword `TABLE`, e.g. `RENAME db.t1 TO db.t2`. [#55373](https://github.com/ClickHouse/ClickHouse/pull/55373) ([凌涛](https://github.com/lingtaolf)).
+* Add `internal_replication` to `system.clusters`. [#55377](https://github.com/ClickHouse/ClickHouse/pull/55377) ([Konstantin Morozov](https://github.com/k-morozov)).
+* Select the remote proxy resolver based on the request protocol, add proxy feature docs and remove `DB::ProxyConfiguration::Protocol::ANY`. [#55430](https://github.com/ClickHouse/ClickHouse/pull/55430) ([Arthur Passos](https://github.com/arthurpassos)).
+* Avoid retrying Keeper operations on INSERT after table shutdown. [#55519](https://github.com/ClickHouse/ClickHouse/pull/55519) ([Azat Khuzhin](https://github.com/azat)).
+* Improved overall resilience for ClickHouse in case of many parts within a partition (more than 1000). It might reduce the number of `TOO_MANY_PARTS` errors. [#55526](https://github.com/ClickHouse/ClickHouse/pull/55526) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+* Follow up https://github.com/ClickHouse/ClickHouse/pull/55184 to not fall into an `UNKNOWN_SETTING` error when the user uses deleted MergeTree settings. [#55557](https://github.com/ClickHouse/ClickHouse/pull/55557) ([Jihyuk Bok](https://github.com/tomahawk28)).
+* Updated `dashboard.html`: if there is only one chart, the "maximize" and "drag" buttons are not shown. [#55581](https://github.com/ClickHouse/ClickHouse/pull/55581) ([bhavuk2002](https://github.com/bhavuk2002)).
+* Functions `toDayOfWeek()` (MySQL alias: `DAYOFWEEK()`), `toYearWeek()` (`YEARWEEK()`) and `toWeek()` (`WEEK()`) now support `String` arguments. This makes their behavior consistent with MySQL's behavior. [#55589](https://github.com/ClickHouse/ClickHouse/pull/55589) ([Robert Schulze](https://github.com/rschu1ze)).
+* Implement query parameters support for `ALTER TABLE ... ACTION PARTITION [ID] {parameter_name:ParameterType}`. Merges [#49516](https://github.com/ClickHouse/ClickHouse/issues/49516). Closes [#49449](https://github.com/ClickHouse/ClickHouse/issues/49449). [#55604](https://github.com/ClickHouse/ClickHouse/pull/55604) ([alesapin](https://github.com/alesapin)).
+* Inverted indexes do not store tokens with too many matches (i.e. row ids in the posting list). This saves space and avoids ineffective index lookups when sequential scans would be equally fast or faster. The previous heuristic (the `density` parameter passed to the index definition) that controlled when tokens would not be stored was too confusing for users. A much simpler heuristic based on the parameter `max_rows_per_postings_list` (default: 64k) is introduced which directly controls the maximum allowed number of row ids in a postings list. [#55616](https://github.com/ClickHouse/ClickHouse/pull/55616) ([Harry Lee](https://github.com/HarryLeeIBM)).
+* `SHOW COLUMNS` now correctly reports type `FixedString` as `BLOB` if setting `use_mysql_types_in_show_columns` is on. Also added two new settings, `mysql_map_string_to_text_in_show_columns` and `mysql_map_fixed_string_to_text_in_show_columns`, to switch the output for types `String` and `FixedString` as `TEXT` or `BLOB`. [#55617](https://github.com/ClickHouse/ClickHouse/pull/55617) ([Serge Klochkov](https://github.com/slvrtrn)).
+* During startup of ReplicatedMergeTree tables the ClickHouse server checks the set of parts for unexpected parts (which exist locally, but not in ZooKeeper). All unexpected parts are moved to the detached directory, and instead of them the server tries to restore some ancestor (covered) parts. Now the server tries to restore the closest ancestors instead of random covered parts. [#55645](https://github.com/ClickHouse/ClickHouse/pull/55645) ([alesapin](https://github.com/alesapin)).
+* The advanced dashboard now supports draggable charts on touch devices. This closes [#54206](https://github.com/ClickHouse/ClickHouse/issues/54206). [#55649](https://github.com/ClickHouse/ClickHouse/pull/55649) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Introduced setting `date_time_overflow_behavior` with possible values `ignore`, `throw`, `saturate` that controls the overflow behavior when converting from Date, Date32, DateTime64, Integer or Float to Date, Date32, DateTime or DateTime64. [#55696](https://github.com/ClickHouse/ClickHouse/pull/55696) ([Andrey Zvonov](https://github.com/zvonand)).
+* Improve write performance to RocksDB. [#55732](https://github.com/ClickHouse/ClickHouse/pull/55732) ([Duc Canh Le](https://github.com/canhld94)).
+* Use the default query format, if declared, when outputting an exception with `http_write_exception_in_output_format`. [#55739](https://github.com/ClickHouse/ClickHouse/pull/55739) ([Raúl Marín](https://github.com/Algunenano)).
+* Use the upstream repo for Apache DataSketches. [#55787](https://github.com/ClickHouse/ClickHouse/pull/55787) ([Nikita Taranov](https://github.com/nickitat)).
+* Add support for the `SHOW MERGES` query. [#55815](https://github.com/ClickHouse/ClickHouse/pull/55815) ([megao](https://github.com/jetgm)).
+* Provide a better message for common MV pitfalls. [#55826](https://github.com/ClickHouse/ClickHouse/pull/55826) ([Raúl Marín](https://github.com/Algunenano)).
+* Reduced memory consumption during loading of hierarchical dictionaries. [#55838](https://github.com/ClickHouse/ClickHouse/pull/55838) ([Nikita Taranov](https://github.com/nickitat)).
+* All dictionaries support setting `dictionary_use_async_executor`. [#55839](https://github.com/ClickHouse/ClickHouse/pull/55839) ([vdimir](https://github.com/vdimir)).
+* If you dropped the current database, you will still be able to run some queries in `clickhouse-local` and switch to another database. This makes the behavior consistent with `clickhouse-client`. This closes [#55834](https://github.com/ClickHouse/ClickHouse/issues/55834). [#55853](https://github.com/ClickHouse/ClickHouse/pull/55853) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Functions `(add|subtract)(Year|Quarter|Month|Week|Day|Hour|Minute|Second|Millisecond|Microsecond|Nanosecond)` now support string-encoded date arguments, e.g. `SELECT addDays('2023-10-22', 1)`. This increases compatibility with MySQL and is needed by Tableau Online. [#55869](https://github.com/ClickHouse/ClickHouse/pull/55869) ([Robert Schulze](https://github.com/rschu1ze)).
+* Introduce setting `create_table_empty_primary_key_by_default` for default `ORDER BY ()`. [#55899](https://github.com/ClickHouse/ClickHouse/pull/55899) ([Srikanth Chekuri](https://github.com/srikanthccv)).
+* Prevent excessive memory usage when deserializing AggregateFunctionTopKGenericData. [#55947](https://github.com/ClickHouse/ClickHouse/pull/55947) ([Raúl Marín](https://github.com/Algunenano)).
+* The setting `apply_deleted_mask`, when disabled, allows reading rows that were marked as deleted by lightweight DELETE queries. This is useful for debugging. [#55952](https://github.com/ClickHouse/ClickHouse/pull/55952) ([Alexander Gololobov](https://github.com/davenger)).
+* Allow skipping null values when serializing a tuple to JSON objects, which makes it possible to keep compatibility with the Spark `to_json` function; this is also useful for Gluten. [#55956](https://github.com/ClickHouse/ClickHouse/pull/55956) ([李扬](https://github.com/taiyang-li)).
+* Functions `(add|sub)Date()` now support string-encoded date arguments, e.g. `SELECT addDate('2023-10-22 11:12:13', INTERVAL 5 MINUTE)`. The same support for string-encoded date arguments is added to the plus and minus operators, e.g. `SELECT '2023-10-23' + INTERVAL 1 DAY`. This increases compatibility with MySQL and is needed by Tableau Online. [#55960](https://github.com/ClickHouse/ClickHouse/pull/55960) ([Robert Schulze](https://github.com/rschu1ze)).
+* Allow unquoted strings with CR in CSV format. Closes [#39930](https://github.com/ClickHouse/ClickHouse/issues/39930). [#56046](https://github.com/ClickHouse/ClickHouse/pull/56046) ([Kruglov Pavel](https://github.com/Avogar)).
+* On a Keeper with lots of watches, AsyncMetrics threads can consume 100% of CPU for a noticeable time in `DB::KeeperStorage::getSessionsWithWatchesCount()`. The fix is to avoid traversing the heavy `watches` and `list_watches` sets. [#56054](https://github.com/ClickHouse/ClickHouse/pull/56054) ([Alexander Gololobov](https://github.com/davenger)).
+* Allow to run `clickhouse-keeper` using an embedded config. [#56086](https://github.com/ClickHouse/ClickHouse/pull/56086) ([Maksim Kita](https://github.com/kitaisreal)).
+* Set a limit on the maximum configuration value for `queued.min.messages` to avoid problems with starting to fetch data from Kafka. [#56121](https://github.com/ClickHouse/ClickHouse/pull/56121) ([Stas Morozov](https://github.com/r3b-fish)).
+* Fixed a typo in SQL function `minSampleSizeContinous` (renamed to `minSampleSizeContinuous`). The old name is preserved for backward compatibility. This closes: [#56139](https://github.com/ClickHouse/ClickHouse/issues/56139). [#56143](https://github.com/ClickHouse/ClickHouse/pull/56143) ([Dorota Szeremeta](https://github.com/orotaday)).
+* Print the path of a corrupted part on disk before shutting down the server. Before this change, if a part was corrupted on disk and the server could not start, it was almost impossible to understand which part was broken. This is fixed. [#56181](https://github.com/ClickHouse/ClickHouse/pull/56181) ([Duc Canh Le](https://github.com/canhld94)).
+
+#### Build/Testing/Packaging Improvement
+* If the database is already initialized, it doesn't need to be initialized again upon subsequent launches. This can potentially fix the issue of infinite container restarts when the database fails to load within 1000 attempts (relevant for very large databases and multi-node setups). [#50724](https://github.com/ClickHouse/ClickHouse/pull/50724) ([Alexander Nikolaev](https://github.com/AlexNik)).
+* A resource with the source code including submodules is built in the Darwin special build task. It may be used to build ClickHouse without checking out submodules. [#51435](https://github.com/ClickHouse/ClickHouse/pull/51435) ([Ilya Yatsishin](https://github.com/qoega)).
+* An error will occur when building ClickHouse with the AVX series of instructions enabled. CMake command: ```shell cmake ..
-DCMAKE_BUILD_TYPE=Release -DENABLE_AVX=ON -DENABLE_AVX2=ON -DENABLE_AVX2_FOR_SPEC_OP=ON ``` Failed message: ``` [1558/11630] Building CXX object contrib/snappy-cmake/CMakeFiles/_snappy.dir/__/snappy/snappy.cc.o FAILED: contrib/snappy-cmake/CMakeFiles/_snappy.dir/__/snappy/snappy.cc.o /usr/bin/ccache /usr/bin/clang++-17 --target=x86_64-linux-gnu --sysroot=/opt/ClickHouse/cmake/linux/../../contrib/sysroot/linux-x86_64/x86_64-linux-gnu/libc -DHAVE_CONFIG_H -DSTD_EXCEPTION_HAS_STACK_TRACE=1 -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -I/opt/ClickHouse/base/glibc-compatibility/memcpy -isystem /opt/ClickHouse/contrib/snappy -isystem /opt/ClickHouse/build/contrib/snappy-cmake -isystem /opt/ClickHouse/contrib/llvm-project/libcxx/include -isystem /opt/ClickHouse/contrib/llvm-project/libcxxabi/include -isystem /opt/ClickHouse/contrib/libunwind/include --gcc-toolchain=/opt/ClickHouse/cmake/linux/../../contrib/sysroot/linux-x86_64 -fdiagnostics-color=always -Wno-enum-constexpr-conversion -fsized-deallocation -pipe -mssse3 -msse4.1 -msse4.2 -mpclmul -mpopcnt -mavx -mavx2 -fasynchronous-unwind-tables -ffile-prefix-map=/opt/ClickHouse=. -falign-functions=32 -mbranches-within-32B-boundaries -fdiagnostics-absolute-paths -fstrict-vtable-pointers -w -O3 -DNDEBUG -D OS_LINUX -Werror -nostdinc++ -std=c++2b -MD -MT contrib/snappy-cmake/CMakeFiles/_snappy.dir/__/snappy/snappy.cc.o -MF contrib/snappy-cmake/CMakeFiles/_snappy.dir/__/snappy/snappy.cc.o.d -o contrib/snappy-cmake/CMakeFiles/_snappy.dir/__/snappy/snappy.cc.o -c /opt/ClickHouse/contrib/snappy/snappy.cc /opt/ClickHouse/contrib/snappy/snappy.cc:1061:3: error: unknown type name '__m256i' 1061 | __m256i data = _mm256_lddqu_si256(static_cast(src)); | ^ /opt/ClickHouse/contrib/snappy/snappy.cc:1061:55: error: unknown type name '__m256i' 1061 | __m256i data = _mm256_lddqu_si256(static_cast(src)); | ^ /opt/ClickHouse/contrib/snappy/snappy.cc:1061:18: error: use of undeclared identifier '_mm256_lddqu_si256'; did you mean '_mm_lddqu_si128'? 1061 | __m256i data = _mm256_lddqu_si256(static_cast(src)); | ^~~~~~~~~~~~~~~~~~ | _mm_lddqu_si128 /usr/lib/llvm-17/lib/clang/17/include/pmmintrin.h:38:1: note: '_mm_lddqu_si128' declared here 38 | _mm_lddqu_si128(__m128i_u const *__p) | ^ /opt/ClickHouse/contrib/snappy/snappy.cc:1061:66: error: cannot initialize a parameter of type 'const __m128i_u *' with an lvalue of type 'const void *' 1061 | __m256i data = _mm256_lddqu_si256(static_cast(src)); | ^~~ /usr/lib/llvm-17/lib/clang/17/include/pmmintrin.h:38:34: note: passing argument to parameter '__p' here 38 | _mm_lddqu_si128(__m128i_u const *__p) | ^ /opt/ClickHouse/contrib/snappy/snappy.cc:1062:40: error: unknown type name '__m256i' 1062 | _mm256_storeu_si256(reinterpret_cast<__m256i *>(dst), data); | ^ /opt/ClickHouse/contrib/snappy/snappy.cc:1065:49: error: unknown type name '__m256i' 1065 | data = _mm256_lddqu_si256(static_cast(src) + 1); | ^ /opt/ClickHouse/contrib/snappy/snappy.cc:1065:65: error: arithmetic on a pointer to void 1065 | data = _mm256_lddqu_si256(static_cast(src) + 1); | ~~~ ^ /opt/ClickHouse/contrib/snappy/snappy.cc:1066:42: error: unknown type name '__m256i' 1066 | _mm256_storeu_si256(reinterpret_cast<__m256i *>(dst) + 1, data); | ^ 8 errors generated. [1567/11630] Building CXX object contrib/rocksdb-cma...rocksdb.dir/__/rocksdb/db/arena_wrapped_db_iter.cc.o ninja: build stopped: subcommand failed. ``` The reason is that snappy does not enable SNAPPY_HAVE_X86_CRC32. 
[#55049](https://github.com/ClickHouse/ClickHouse/pull/55049) ([monchickey](https://github.com/monchickey)).
+* Add `instance_env_variables` option to integration tests. [#55208](https://github.com/ClickHouse/ClickHouse/pull/55208) ([Arthur Passos](https://github.com/arthurpassos)).
+* Solve issue with launching standalone clickhouse-keeper from the clickhouse-server package. [#55226](https://github.com/ClickHouse/ClickHouse/pull/55226) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+* In tests, the RabbitMQ version is updated to 3.12.6. Improved logs collection for RabbitMQ tests. [#55424](https://github.com/ClickHouse/ClickHouse/pull/55424) ([Ilya Yatsishin](https://github.com/qoega)).
+* Fix integration check python script to use gh api url - Add Readme for CI tests. [#55476](https://github.com/ClickHouse/ClickHouse/pull/55476) ([Max K.](https://github.com/mkaynov)).
+* Fix integration check python script to use gh api url - Add Readme for CI tests. [#55716](https://github.com/ClickHouse/ClickHouse/pull/55716) ([Max K.](https://github.com/mkaynov)).
+* Check sha512 for tgz; use a proper repository for keeper; write only filenames to TGZ.sha512 files for tarball packages. Prerequisite for [#31473](https://github.com/ClickHouse/ClickHouse/issues/31473). [#55717](https://github.com/ClickHouse/ClickHouse/pull/55717) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+* Updated to get a free port for Azurite. [#55796](https://github.com/ClickHouse/ClickHouse/pull/55796) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
+* Reduce the output info. [#55938](https://github.com/ClickHouse/ClickHouse/pull/55938) ([helifu](https://github.com/helifu)).
+* Modified the error message difference between OpenSSL and BoringSSL to fix the functional test. [#55975](https://github.com/ClickHouse/ClickHouse/pull/55975) ([MeenaRenganathan22](https://github.com/MeenaRenganathan22)).
+* Changes to support HDFS for s390x. [#56128](https://github.com/ClickHouse/ClickHouse/pull/56128) ([MeenaRenganathan22](https://github.com/MeenaRenganathan22)).
+* Fix flaky test of JBOD balancer by relaxing the Gini coefficient and introducing more determinism in insertions. [#56175](https://github.com/ClickHouse/ClickHouse/pull/56175) ([Amos Bird](https://github.com/amosbird)).
+
+#### Bug Fix (user-visible misbehavior in an official stable release)
+
+* Skip hardlinking inverted index files in mutation [#47663](https://github.com/ClickHouse/ClickHouse/pull/47663) ([cangyin](https://github.com/cangyin)).
+* Fix 'Cannot find column' in read-in-order optimization with ARRAY JOIN [#51746](https://github.com/ClickHouse/ClickHouse/pull/51746) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Support missed Object(Nullable(json)) subcolumns in query. [#54052](https://github.com/ClickHouse/ClickHouse/pull/54052) ([zps](https://github.com/VanDarkholme7)).
+* Re-add fix for `accurateCastOrNull()` [#54629](https://github.com/ClickHouse/ClickHouse/pull/54629) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
+* Fix detecting DEFAULT for columns of a Distributed table created without AS [#55060](https://github.com/ClickHouse/ClickHouse/pull/55060) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Proper cleanup in case of exception in ctor of ShellCommandSource [#55103](https://github.com/ClickHouse/ClickHouse/pull/55103) ([Alexander Gololobov](https://github.com/davenger)).
+* Fix deadlock in LDAP assigned role update [#55119](https://github.com/ClickHouse/ClickHouse/pull/55119) ([Julian Maicher](https://github.com/jmaicher)). +* Suppress error statistics update for internal exceptions [#55128](https://github.com/ClickHouse/ClickHouse/pull/55128) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix deadlock in backups [#55132](https://github.com/ClickHouse/ClickHouse/pull/55132) ([alesapin](https://github.com/alesapin)). +* Fix storage Iceberg files retrieval [#55144](https://github.com/ClickHouse/ClickHouse/pull/55144) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Fix partition pruning of extra columns in set. [#55172](https://github.com/ClickHouse/ClickHouse/pull/55172) ([Amos Bird](https://github.com/amosbird)). +* Fix recalculation of skip indexes in ALTER UPDATE queries when table has adaptive granularity [#55202](https://github.com/ClickHouse/ClickHouse/pull/55202) ([Duc Canh Le](https://github.com/canhld94)). +* Fix for background download in fs cache [#55252](https://github.com/ClickHouse/ClickHouse/pull/55252) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Avoid possible memory leaks in compressors in case of missing buffer finalization [#55262](https://github.com/ClickHouse/ClickHouse/pull/55262) ([Azat Khuzhin](https://github.com/azat)). +* Fix functions execution over sparse columns [#55275](https://github.com/ClickHouse/ClickHouse/pull/55275) ([Azat Khuzhin](https://github.com/azat)). +* Fix incorrect merging of Nested for SELECT FINAL FROM SummingMergeTree [#55276](https://github.com/ClickHouse/ClickHouse/pull/55276) ([Azat Khuzhin](https://github.com/azat)). +* Fix bug with inability to drop detached partition in replicated merge tree on top of S3 without zero copy [#55309](https://github.com/ClickHouse/ClickHouse/pull/55309) ([alesapin](https://github.com/alesapin)). +* Fix SIGSEGV in MergeSortingPartialResultTransform (due to zero chunks after remerge()) [#55335](https://github.com/ClickHouse/ClickHouse/pull/55335) ([Azat Khuzhin](https://github.com/azat)). +* Fix data-race in CreatingSetsTransform (on errors) due to throwing shared exception [#55338](https://github.com/ClickHouse/ClickHouse/pull/55338) ([Azat Khuzhin](https://github.com/azat)). +* Fix trash optimization (up to a certain extent) [#55353](https://github.com/ClickHouse/ClickHouse/pull/55353) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix leak in StorageHDFS [#55370](https://github.com/ClickHouse/ClickHouse/pull/55370) ([Azat Khuzhin](https://github.com/azat)). +* Fix parsing of arrays in cast operator [#55417](https://github.com/ClickHouse/ClickHouse/pull/55417) ([Anton Popov](https://github.com/CurtizJ)). +* Fix filtering by virtual columns with OR filter in query [#55418](https://github.com/ClickHouse/ClickHouse/pull/55418) ([Azat Khuzhin](https://github.com/azat)). +* Fix MongoDB connection issues [#55419](https://github.com/ClickHouse/ClickHouse/pull/55419) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix MySQL interface boolean representation [#55427](https://github.com/ClickHouse/ClickHouse/pull/55427) ([Serge Klochkov](https://github.com/slvrtrn)). +* Fix MySQL text protocol DateTime formatting and LowCardinality(Nullable(T)) types reporting [#55479](https://github.com/ClickHouse/ClickHouse/pull/55479) ([Serge Klochkov](https://github.com/slvrtrn)). 
+* Make `use_mysql_types_in_show_columns` affect only `SHOW COLUMNS` [#55481](https://github.com/ClickHouse/ClickHouse/pull/55481) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix stack symbolizer parsing DW_FORM_ref_addr incorrectly and sometimes crashing [#55483](https://github.com/ClickHouse/ClickHouse/pull/55483) ([Michael Kolupaev](https://github.com/al13n321)). +* Destroy fiber in case of exception in cancelBefore in AsyncTaskExecutor [#55516](https://github.com/ClickHouse/ClickHouse/pull/55516) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix Query Parameters not working with custom HTTP handlers [#55521](https://github.com/ClickHouse/ClickHouse/pull/55521) ([Konstantin Bogdanov](https://github.com/thevar1able)). +* Fix checking of non handled data for Values format [#55527](https://github.com/ClickHouse/ClickHouse/pull/55527) ([Azat Khuzhin](https://github.com/azat)). +* Fix 'Invalid cursor state' in odbc interacting with MS SQL Server [#55558](https://github.com/ClickHouse/ClickHouse/pull/55558) ([vdimir](https://github.com/vdimir)). +* Fix max execution time and 'break' overflow mode [#55577](https://github.com/ClickHouse/ClickHouse/pull/55577) ([Alexander Gololobov](https://github.com/davenger)). +* Fix crash in QueryNormalizer with cyclic aliases [#55602](https://github.com/ClickHouse/ClickHouse/pull/55602) ([vdimir](https://github.com/vdimir)). +* Disable wrong optimization and add a test [#55609](https://github.com/ClickHouse/ClickHouse/pull/55609) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Merging [#52352](https://github.com/ClickHouse/ClickHouse/issues/52352) [#55621](https://github.com/ClickHouse/ClickHouse/pull/55621) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Add a test to avoid incorrect decimal sorting [#55662](https://github.com/ClickHouse/ClickHouse/pull/55662) ([Amos Bird](https://github.com/amosbird)). +* Fix progress bar for s3 and azure Cluster functions with url without globs [#55666](https://github.com/ClickHouse/ClickHouse/pull/55666) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix filtering by virtual columns with OR filter in query (resubmit) [#55678](https://github.com/ClickHouse/ClickHouse/pull/55678) ([Azat Khuzhin](https://github.com/azat)). +* Fixes and improvements for Iceberg storage [#55695](https://github.com/ClickHouse/ClickHouse/pull/55695) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix data race in CreatingSetsTransform (v2) [#55786](https://github.com/ClickHouse/ClickHouse/pull/55786) ([Azat Khuzhin](https://github.com/azat)). +* Throw exception when parsing illegal string as float if precise_float_parsing is true [#55861](https://github.com/ClickHouse/ClickHouse/pull/55861) ([李扬](https://github.com/taiyang-li)). +* Disable predicate pushdown if the CTE contains stateful functions [#55871](https://github.com/ClickHouse/ClickHouse/pull/55871) ([Raúl Marín](https://github.com/Algunenano)). +* Fix normalize ASTSelectWithUnionQuery strip FORMAT of the query [#55887](https://github.com/ClickHouse/ClickHouse/pull/55887) ([flynn](https://github.com/ucasfl)). +* Try to fix possible segfault in Native ORC input format [#55891](https://github.com/ClickHouse/ClickHouse/pull/55891) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix window functions in case of sparse columns. [#55895](https://github.com/ClickHouse/ClickHouse/pull/55895) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). 
+* fix: StorageNull supports subcolumns [#55912](https://github.com/ClickHouse/ClickHouse/pull/55912) ([FFish](https://github.com/wxybear)). +* Do not write retriable errors for Replicated mutate/merge into error log [#55944](https://github.com/ClickHouse/ClickHouse/pull/55944) ([Azat Khuzhin](https://github.com/azat)). +* Fix `SHOW DATABASES LIMIT ` [#55962](https://github.com/ClickHouse/ClickHouse/pull/55962) ([Raúl Marín](https://github.com/Algunenano)). +* Fix autogenerated Protobuf schema with fields with underscore [#55974](https://github.com/ClickHouse/ClickHouse/pull/55974) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix dateTime64ToSnowflake64() with non-default scale [#55983](https://github.com/ClickHouse/ClickHouse/pull/55983) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix output/input of Arrow dictionary column [#55989](https://github.com/ClickHouse/ClickHouse/pull/55989) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix fetching schema from schema registry in AvroConfluent [#55991](https://github.com/ClickHouse/ClickHouse/pull/55991) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix 'Block structure mismatch' on concurrent ALTER and INSERTs in Buffer table [#55995](https://github.com/ClickHouse/ClickHouse/pull/55995) ([Michael Kolupaev](https://github.com/al13n321)). +* Fix incorrect free space accounting for least_used JBOD policy [#56030](https://github.com/ClickHouse/ClickHouse/pull/56030) ([Azat Khuzhin](https://github.com/azat)). +* Fix missing scalar issue when evaluating subqueries inside table functions [#56057](https://github.com/ClickHouse/ClickHouse/pull/56057) ([Amos Bird](https://github.com/amosbird)). +* Fix wrong query result when http_write_exception_in_output_format=1 [#56135](https://github.com/ClickHouse/ClickHouse/pull/56135) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix schema cache for fallback JSON->JSONEachRow with changed settings [#56172](https://github.com/ClickHouse/ClickHouse/pull/56172) ([Kruglov Pavel](https://github.com/Avogar)). +* Add error handler to odbc-bridge [#56185](https://github.com/ClickHouse/ClickHouse/pull/56185) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). + +#### NO CL ENTRY + +* NO CL ENTRY: 'Revert "Fix libssh+openssl3 & s390x (part 2)"'. [#55188](https://github.com/ClickHouse/ClickHouse/pull/55188) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* NO CL ENTRY: 'Revert "Support SAMPLE BY for VIEW"'. [#55357](https://github.com/ClickHouse/ClickHouse/pull/55357) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* NO CL ENTRY: 'Revert "Revert "refine error code of duplicated index in create query""'. [#55467](https://github.com/ClickHouse/ClickHouse/pull/55467) ([Han Fei](https://github.com/hanfei1991)). +* NO CL ENTRY: 'Update mysql.md - Remove the Private Preview Note'. [#55486](https://github.com/ClickHouse/ClickHouse/pull/55486) ([Ryadh DAHIMENE](https://github.com/Ryado)). +* NO CL ENTRY: 'Revert "Removed "maximize" and "drag" buttons from `dashboard` in case of single chart"'. [#55623](https://github.com/ClickHouse/ClickHouse/pull/55623) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* NO CL ENTRY: 'Revert "Fix filtering by virtual columns with OR filter in query"'. [#55657](https://github.com/ClickHouse/ClickHouse/pull/55657) ([Antonio Andelic](https://github.com/antonio2368)). +* NO CL ENTRY: 'Revert "Improve ColumnDecimal, ColumnVector getPermutation performance using pdqsort with RadixSort"'. 
[#55682](https://github.com/ClickHouse/ClickHouse/pull/55682) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* NO CL ENTRY: 'Revert "Integration check script fix ups"'. [#55694](https://github.com/ClickHouse/ClickHouse/pull/55694) ([alesapin](https://github.com/alesapin)). +* NO CL ENTRY: 'Revert "Fix 'Block structure mismatch' on concurrent ALTER and INSERTs in Buffer table"'. [#56103](https://github.com/ClickHouse/ClickHouse/pull/56103) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* NO CL ENTRY: 'Revert "Add function getHttpHeader"'. [#56109](https://github.com/ClickHouse/ClickHouse/pull/56109) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* NO CL ENTRY: 'Revert "Fix output/input of Arrow dictionary column"'. [#56150](https://github.com/ClickHouse/ClickHouse/pull/56150) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Set defaults_for_omitted_fields to true for hive text format [#49486](https://github.com/ClickHouse/ClickHouse/pull/49486) ([李扬](https://github.com/taiyang-li)). +* Fixing join tests with analyzer [#49555](https://github.com/ClickHouse/ClickHouse/pull/49555) ([vdimir](https://github.com/vdimir)). +* Make exception about `ALTER TABLE ... DROP COLUMN|INDEX|PROJECTION` more clear [#50181](https://github.com/ClickHouse/ClickHouse/pull/50181) ([Alexander Gololobov](https://github.com/davenger)). +* ANTI JOIN: Invalid number of rows in Chunk [#50944](https://github.com/ClickHouse/ClickHouse/pull/50944) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Analyzer: fix row policy [#53170](https://github.com/ClickHouse/ClickHouse/pull/53170) ([Dmitry Novik](https://github.com/novikd)). +* Add a test with Block structure mismatch in grace hash join. [#53278](https://github.com/ClickHouse/ClickHouse/pull/53278) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Support skip_unused_shards in Analyzer [#53282](https://github.com/ClickHouse/ClickHouse/pull/53282) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Revert "Revert "Planner prepare filters for analysis"" [#53792](https://github.com/ClickHouse/ClickHouse/pull/53792) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Add setting allow_experimental_partial_result [#54514](https://github.com/ClickHouse/ClickHouse/pull/54514) ([vdimir](https://github.com/vdimir)). +* Fix CI skip build and skip tests checks [#54532](https://github.com/ClickHouse/ClickHouse/pull/54532) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)). +* `MergeTreeData::forcefullyMovePartToDetachedAndRemoveFromMemory` does not respect mutations [#54653](https://github.com/ClickHouse/ClickHouse/pull/54653) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Fix azure test by using unique names [#54738](https://github.com/ClickHouse/ClickHouse/pull/54738) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)). +* Use `--filter` to reduce checkout time [#54857](https://github.com/ClickHouse/ClickHouse/pull/54857) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Remove tests [#54873](https://github.com/ClickHouse/ClickHouse/pull/54873) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). +* Remove useless path from lockSharedData in StorageReplicatedMergeTree [#54989](https://github.com/ClickHouse/ClickHouse/pull/54989) ([Mike Kot](https://github.com/myrrc)). +* Fix broken test [#55002](https://github.com/ClickHouse/ClickHouse/pull/55002) ([Kseniia Sumarokova](https://github.com/kssenii)). 
+* Update version_date.tsv and changelogs after v23.8.3.48-lts [#55063](https://github.com/ClickHouse/ClickHouse/pull/55063) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Use `source` instead of `bash` for pre-build script [#55071](https://github.com/ClickHouse/ClickHouse/pull/55071) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Update s3queue.md to add experimental flag [#55093](https://github.com/ClickHouse/ClickHouse/pull/55093) ([Peignon Melvyn](https://github.com/melvynator)). +* Clean data dir and always start an old server version in aggregate functions compatibility test. [#55105](https://github.com/ClickHouse/ClickHouse/pull/55105) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Update version_date.tsv and changelogs after v23.9.1.1854-stable [#55118](https://github.com/ClickHouse/ClickHouse/pull/55118) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Add links to check reports in status comment [#55122](https://github.com/ClickHouse/ClickHouse/pull/55122) ([vdimir](https://github.com/vdimir)). +* Bump croaring to 2.0.2 [#55127](https://github.com/ClickHouse/ClickHouse/pull/55127) ([Robert Schulze](https://github.com/rschu1ze)). +* check if block is empty after async insert retries [#55143](https://github.com/ClickHouse/ClickHouse/pull/55143) ([Han Fei](https://github.com/hanfei1991)). +* Improve linker detection on macOS [#55147](https://github.com/ClickHouse/ClickHouse/pull/55147) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix libssh+openssl3 & s390x [#55154](https://github.com/ClickHouse/ClickHouse/pull/55154) ([Boris Kuschel](https://github.com/bkuschel)). +* Fix file cache temporary file segment range in FileSegment::reserve [#55164](https://github.com/ClickHouse/ClickHouse/pull/55164) ([vdimir](https://github.com/vdimir)). +* Fix libssh+openssl3 & s390x (part 2) [#55187](https://github.com/ClickHouse/ClickHouse/pull/55187) ([Boris Kuschel](https://github.com/bkuschel)). +* Fix wrong test name [#55190](https://github.com/ClickHouse/ClickHouse/pull/55190) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix libssh+openssl3 & s390x (Part 2.1) [#55192](https://github.com/ClickHouse/ClickHouse/pull/55192) ([Boris Kuschel](https://github.com/bkuschel)). +* Reset signals caught by clickhouse-client if a pager is in use [#55193](https://github.com/ClickHouse/ClickHouse/pull/55193) ([Azat Khuzhin](https://github.com/azat)). +* Update README.md [#55209](https://github.com/ClickHouse/ClickHouse/pull/55209) ([Tyler Hannan](https://github.com/tylerhannan)). +* remove the blocker to grow the metadata file version [#55218](https://github.com/ClickHouse/ClickHouse/pull/55218) ([Sema Checherinda](https://github.com/CheSema)). +* Fix syntax highlight in client for spaceship operator [#55224](https://github.com/ClickHouse/ClickHouse/pull/55224) ([Azat Khuzhin](https://github.com/azat)). +* Fix mypy errors [#55228](https://github.com/ClickHouse/ClickHouse/pull/55228) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Upgrade MinIO to support accepting non signed requests [#55245](https://github.com/ClickHouse/ClickHouse/pull/55245) ([Azat Khuzhin](https://github.com/azat)). +* tests: switch test_throttling to S3 over https to make it more production like [#55247](https://github.com/ClickHouse/ClickHouse/pull/55247) ([Azat Khuzhin](https://github.com/azat)). 
+* Evaluate defaults during async insert safer [#55253](https://github.com/ClickHouse/ClickHouse/pull/55253) ([Vitaly Baranov](https://github.com/vitlibar)). +* Fix data race in context [#55260](https://github.com/ClickHouse/ClickHouse/pull/55260) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Do not allow tests with state ERROR be overwritten by PASSED [#55261](https://github.com/ClickHouse/ClickHouse/pull/55261) ([Azat Khuzhin](https://github.com/azat)). +* Fix query formatting for SYSTEM queries [#55277](https://github.com/ClickHouse/ClickHouse/pull/55277) ([Azat Khuzhin](https://github.com/azat)). +* Context added TSA [#55278](https://github.com/ClickHouse/ClickHouse/pull/55278) ([Maksim Kita](https://github.com/kitaisreal)). +* Improve logging in query cache [#55296](https://github.com/ClickHouse/ClickHouse/pull/55296) ([Robert Schulze](https://github.com/rschu1ze)). +* MaterializedPostgreSQL: remove back check [#55297](https://github.com/ClickHouse/ClickHouse/pull/55297) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Make `use_mysql_types_in_show_columns` independent from the connection [#55298](https://github.com/ClickHouse/ClickHouse/pull/55298) ([Robert Schulze](https://github.com/rschu1ze)). +* Make `HandlingRuleHTTPHandlerFactory` more stupid, but less error prone [#55307](https://github.com/ClickHouse/ClickHouse/pull/55307) ([alesapin](https://github.com/alesapin)). +* Fix tsan issue in croaring [#55311](https://github.com/ClickHouse/ClickHouse/pull/55311) ([Robert Schulze](https://github.com/rschu1ze)). +* Refactorings and better documentation for `toStartOfInterval()` [#55327](https://github.com/ClickHouse/ClickHouse/pull/55327) ([Robert Schulze](https://github.com/rschu1ze)). +* Review [#51946](https://github.com/ClickHouse/ClickHouse/issues/51946) and partially revert it [#55336](https://github.com/ClickHouse/ClickHouse/pull/55336) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Update README.md [#55339](https://github.com/ClickHouse/ClickHouse/pull/55339) ([Tyler Hannan](https://github.com/tylerhannan)). +* Context locks small fixes [#55352](https://github.com/ClickHouse/ClickHouse/pull/55352) ([Maksim Kita](https://github.com/kitaisreal)). +* Fix bad test `01605_dictinct_two_level` [#55354](https://github.com/ClickHouse/ClickHouse/pull/55354) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix test_max_rows_to_read_leaf_with_view flakiness (due to prefer_localhost_replica) [#55355](https://github.com/ClickHouse/ClickHouse/pull/55355) ([Azat Khuzhin](https://github.com/azat)). +* Better exception messages [#55356](https://github.com/ClickHouse/ClickHouse/pull/55356) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Update CHANGELOG.md [#55359](https://github.com/ClickHouse/ClickHouse/pull/55359) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Better recursion depth check [#55361](https://github.com/ClickHouse/ClickHouse/pull/55361) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Merging [#43085](https://github.com/ClickHouse/ClickHouse/issues/43085) [#55362](https://github.com/ClickHouse/ClickHouse/pull/55362) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix data-race in web disk [#55372](https://github.com/ClickHouse/ClickHouse/pull/55372) ([Azat Khuzhin](https://github.com/azat)). +* Disable skim under TSan (Rust does not supports ThreadSanitizer) [#55378](https://github.com/ClickHouse/ClickHouse/pull/55378) ([Azat Khuzhin](https://github.com/azat)). 
+* Fix missing thread accounting for insert_distributed_sync=1 [#55392](https://github.com/ClickHouse/ClickHouse/pull/55392) ([Azat Khuzhin](https://github.com/azat)). +* Improve tests for untuple() [#55425](https://github.com/ClickHouse/ClickHouse/pull/55425) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix test that never worked test_rabbitmq_random_detach [#55453](https://github.com/ClickHouse/ClickHouse/pull/55453) ([Ilya Yatsishin](https://github.com/qoega)). +* Fix out of bound error in system.remote_data_paths + disk web [#55468](https://github.com/ClickHouse/ClickHouse/pull/55468) ([alesapin](https://github.com/alesapin)). +* Remove existing moving/ dir if allow_remove_stale_moving_parts is off [#55480](https://github.com/ClickHouse/ClickHouse/pull/55480) ([Mike Kot](https://github.com/myrrc)). +* Bump curl to 8.4 [#55492](https://github.com/ClickHouse/ClickHouse/pull/55492) ([Robert Schulze](https://github.com/rschu1ze)). +* Minor fixes for 02882_replicated_fetch_checksums_doesnt_match.sql [#55493](https://github.com/ClickHouse/ClickHouse/pull/55493) ([vdimir](https://github.com/vdimir)). +* AggregatingTransform initGenerate race condition fix [#55495](https://github.com/ClickHouse/ClickHouse/pull/55495) ([Maksim Kita](https://github.com/kitaisreal)). +* HashTable resize exception handle fix [#55497](https://github.com/ClickHouse/ClickHouse/pull/55497) ([Maksim Kita](https://github.com/kitaisreal)). +* fix lots of 'Structure does not match' warnings in ci [#55503](https://github.com/ClickHouse/ClickHouse/pull/55503) ([Han Fei](https://github.com/hanfei1991)). +* Cleanup: parallel replica coordinator usage [#55515](https://github.com/ClickHouse/ClickHouse/pull/55515) ([Igor Nikonov](https://github.com/devcrafter)). +* add k-morozov to trusted contributors [#55523](https://github.com/ClickHouse/ClickHouse/pull/55523) ([Mike Kot](https://github.com/myrrc)). +* Forbid create inverted index if setting not enabled [#55529](https://github.com/ClickHouse/ClickHouse/pull/55529) ([flynn](https://github.com/ucasfl)). +* Better exception messages but without SEGFAULT [#55541](https://github.com/ClickHouse/ClickHouse/pull/55541) ([Antonio Andelic](https://github.com/antonio2368)). +* Avoid setting same promise twice [#55553](https://github.com/ClickHouse/ClickHouse/pull/55553) ([Antonio Andelic](https://github.com/antonio2368)). +* Better error message in case when merge selecting task failed. [#55554](https://github.com/ClickHouse/ClickHouse/pull/55554) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Add a test [#55564](https://github.com/ClickHouse/ClickHouse/pull/55564) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Added healthcheck for LDAP [#55571](https://github.com/ClickHouse/ClickHouse/pull/55571) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Fix global context for tests with --gtest_filter [#55583](https://github.com/ClickHouse/ClickHouse/pull/55583) ([Azat Khuzhin](https://github.com/azat)). +* Fix replica groups for Replicated database engine [#55587](https://github.com/ClickHouse/ClickHouse/pull/55587) ([Azat Khuzhin](https://github.com/azat)). +* Remove unused protobuf includes [#55590](https://github.com/ClickHouse/ClickHouse/pull/55590) ([Raúl Marín](https://github.com/Algunenano)). +* Apply Context changes to standalone Keeper [#55591](https://github.com/ClickHouse/ClickHouse/pull/55591) ([Antonio Andelic](https://github.com/antonio2368)). 
+* Do not fail if label-to-remove does not exists in PR [#55592](https://github.com/ClickHouse/ClickHouse/pull/55592) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* CI: cast extra column expression `pull_request_number` to Int32 [#55599](https://github.com/ClickHouse/ClickHouse/pull/55599) ([Han Fei](https://github.com/hanfei1991)). +* Add back a test that was removed by mistake [#55605](https://github.com/ClickHouse/ClickHouse/pull/55605) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Bump croaring to v2.0.4 [#55606](https://github.com/ClickHouse/ClickHouse/pull/55606) ([Robert Schulze](https://github.com/rschu1ze)). +* byteswap: Add 16/32-byte integer support [#55607](https://github.com/ClickHouse/ClickHouse/pull/55607) ([Robert Schulze](https://github.com/rschu1ze)). +* Revert [#54421](https://github.com/ClickHouse/ClickHouse/issues/54421) [#55613](https://github.com/ClickHouse/ClickHouse/pull/55613) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix: race condition in kusto implementation [#55615](https://github.com/ClickHouse/ClickHouse/pull/55615) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). +* Remove passed tests from `analyzer_tech_debt.txt` [#55618](https://github.com/ClickHouse/ClickHouse/pull/55618) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Enable 02161_addressToLineWithInlines [#55622](https://github.com/ClickHouse/ClickHouse/pull/55622) ([Michael Kolupaev](https://github.com/al13n321)). +* KeyCondition: preparation [#55625](https://github.com/ClickHouse/ClickHouse/pull/55625) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix flakiness of test_system_merges (by increasing sleep interval properly) [#55627](https://github.com/ClickHouse/ClickHouse/pull/55627) ([Azat Khuzhin](https://github.com/azat)). +* fix `structure does not match` logs again [#55628](https://github.com/ClickHouse/ClickHouse/pull/55628) ([Han Fei](https://github.com/hanfei1991)). +* KeyCondition: small changes [#55640](https://github.com/ClickHouse/ClickHouse/pull/55640) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Resubmit [#54421](https://github.com/ClickHouse/ClickHouse/issues/54421) [#55641](https://github.com/ClickHouse/ClickHouse/pull/55641) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix some typos [#55646](https://github.com/ClickHouse/ClickHouse/pull/55646) ([alesapin](https://github.com/alesapin)). +* Show move/maximize only if there is more than a single chart [#55648](https://github.com/ClickHouse/ClickHouse/pull/55648) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Enable test_query_is_lock_free[detach table] for the analyzer [#55668](https://github.com/ClickHouse/ClickHouse/pull/55668) ([Raúl Marín](https://github.com/Algunenano)). +* Allow FINAL with parallel replicas with custom key [#55679](https://github.com/ClickHouse/ClickHouse/pull/55679) ([Antonio Andelic](https://github.com/antonio2368)). +* Fix StorageMaterializedView::isRemote [#55681](https://github.com/ClickHouse/ClickHouse/pull/55681) ([vdimir](https://github.com/vdimir)). +* Bump gRPC to 1.34.1 [#55693](https://github.com/ClickHouse/ClickHouse/pull/55693) ([Robert Schulze](https://github.com/rschu1ze)). +* Randomize block_number column setting in ci [#55713](https://github.com/ClickHouse/ClickHouse/pull/55713) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)). 
+* Use pool for proxied S3 disk http sessions [#55718](https://github.com/ClickHouse/ClickHouse/pull/55718) ([Aleksei Filatov](https://github.com/aalexfvk)). +* Analyzer: fix block stucture mismatch in matview with engine distributed [#55741](https://github.com/ClickHouse/ClickHouse/pull/55741) ([vdimir](https://github.com/vdimir)). +* Use diff object again, since JSON API limits the files [#55750](https://github.com/ClickHouse/ClickHouse/pull/55750) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Big endian platform max intersection fix [#55756](https://github.com/ClickHouse/ClickHouse/pull/55756) ([Suzy Wang](https://github.com/SuzyWangIBMer)). +* Remove temporary debug logging in MultiplexedConnections [#55764](https://github.com/ClickHouse/ClickHouse/pull/55764) ([Michael Kolupaev](https://github.com/al13n321)). +* Check if partition ID is `nullptr` [#55765](https://github.com/ClickHouse/ClickHouse/pull/55765) ([Antonio Andelic](https://github.com/antonio2368)). +* Control Keeper feature flag randomization with env [#55766](https://github.com/ClickHouse/ClickHouse/pull/55766) ([Antonio Andelic](https://github.com/antonio2368)). +* Added test to check CapnProto cache [#55769](https://github.com/ClickHouse/ClickHouse/pull/55769) ([Aleksandr Musorin](https://github.com/AVMusorin)). +* Query Cache: Only cache initial query [#55771](https://github.com/ClickHouse/ClickHouse/pull/55771) ([zhongyuankai](https://github.com/zhongyuankai)). +* Temporarily disable flaky test [#55772](https://github.com/ClickHouse/ClickHouse/pull/55772) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Fix test test_postgresql_replica_database_engine_2/test.py::test_replica_consumer [#55774](https://github.com/ClickHouse/ClickHouse/pull/55774) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Improve ColumnDecimal, ColumnVector getPermutation performance using pdqsort with RadixSort fix [#55775](https://github.com/ClickHouse/ClickHouse/pull/55775) ([Maksim Kita](https://github.com/kitaisreal)). +* Fix black check [#55779](https://github.com/ClickHouse/ClickHouse/pull/55779) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Correctly grep fuzzer.log [#55780](https://github.com/ClickHouse/ClickHouse/pull/55780) ([Antonio Andelic](https://github.com/antonio2368)). +* Parallel replicas: cleanup, less copying during announcement [#55781](https://github.com/ClickHouse/ClickHouse/pull/55781) ([Igor Nikonov](https://github.com/devcrafter)). +* Enable test_mutation_simple with the analyzer [#55791](https://github.com/ClickHouse/ClickHouse/pull/55791) ([Raúl Marín](https://github.com/Algunenano)). +* Improve enrich image [#55793](https://github.com/ClickHouse/ClickHouse/pull/55793) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Bump gRPC to v1.36.4 [#55811](https://github.com/ClickHouse/ClickHouse/pull/55811) ([Robert Schulze](https://github.com/rschu1ze)). +* Attemp to fix test_dictionaries_redis flakiness [#55813](https://github.com/ClickHouse/ClickHouse/pull/55813) ([Raúl Marín](https://github.com/Algunenano)). +* Add diagnostic checks for issue [#55041](https://github.com/ClickHouse/ClickHouse/issues/55041) [#55835](https://github.com/ClickHouse/ClickHouse/pull/55835) ([Robert Schulze](https://github.com/rschu1ze)). +* Correct aggregate functions ser/deserialization to be endianness-independent. [#55837](https://github.com/ClickHouse/ClickHouse/pull/55837) ([Austin Kothig](https://github.com/kothiga)). 
+* Bump gRPC to v1.37.1 [#55840](https://github.com/ClickHouse/ClickHouse/pull/55840) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix 00002_log_and_exception_messages_formatting [#55844](https://github.com/ClickHouse/ClickHouse/pull/55844) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix caching objects in pygithub, and changelogs [#55845](https://github.com/ClickHouse/ClickHouse/pull/55845) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Update version_date.tsv and changelogs after v23.3.14.78-lts [#55847](https://github.com/ClickHouse/ClickHouse/pull/55847) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Update version_date.tsv and changelogs after v23.9.2.56-stable [#55848](https://github.com/ClickHouse/ClickHouse/pull/55848) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Update version_date.tsv and changelogs after v23.8.4.69-lts [#55849](https://github.com/ClickHouse/ClickHouse/pull/55849) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* fix node setting in the test [#55850](https://github.com/ClickHouse/ClickHouse/pull/55850) ([Sema Checherinda](https://github.com/CheSema)). +* Add load_metadata_threads to describe filesystem cache [#55863](https://github.com/ClickHouse/ClickHouse/pull/55863) ([Jordi Villar](https://github.com/jrdi)). +* One final leftover in diff_urls of PRInfo [#55874](https://github.com/ClickHouse/ClickHouse/pull/55874) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Fix digest check in replicated ddl worker [#55877](https://github.com/ClickHouse/ClickHouse/pull/55877) ([Sergei Trifonov](https://github.com/serxa)). +* Test parallel replicas with rollup [#55886](https://github.com/ClickHouse/ClickHouse/pull/55886) ([Raúl Marín](https://github.com/Algunenano)). +* Fix some tests with Replicated database [#55889](https://github.com/ClickHouse/ClickHouse/pull/55889) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Update stress.py [#55890](https://github.com/ClickHouse/ClickHouse/pull/55890) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Revert "Revert "Revert "Add settings for real-time updates during query execution""" [#55893](https://github.com/ClickHouse/ClickHouse/pull/55893) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Make test `system_zookeeper_connection` better [#55900](https://github.com/ClickHouse/ClickHouse/pull/55900) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* A test `01019_alter_materialized_view_consistent` is unstable with Analyzer [#55901](https://github.com/ClickHouse/ClickHouse/pull/55901) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Remove C++ templates, because they are stupid [#55910](https://github.com/ClickHouse/ClickHouse/pull/55910) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Bump gRPC to v1.39.1 [#55914](https://github.com/ClickHouse/ClickHouse/pull/55914) ([Robert Schulze](https://github.com/rschu1ze)). +* Add sanity check to RPNBuilderFunctionTreeNode [#55915](https://github.com/ClickHouse/ClickHouse/pull/55915) ([Robert Schulze](https://github.com/rschu1ze)). +* Bump gRPC to v1.42.0 [#55916](https://github.com/ClickHouse/ClickHouse/pull/55916) ([Robert Schulze](https://github.com/rschu1ze)). +* Set storage.has_lightweight_delete_parts flag when a part has been loaded [#55935](https://github.com/ClickHouse/ClickHouse/pull/55935) ([Alexander Gololobov](https://github.com/davenger)). 
+* Include information about supported versions in bug report issue template [#55937](https://github.com/ClickHouse/ClickHouse/pull/55937) ([Nikita Taranov](https://github.com/nickitat)). +* arrayFold: Switch accumulator and array arguments [#55948](https://github.com/ClickHouse/ClickHouse/pull/55948) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix overrides via connections_credentials in case of root directives exists [#55949](https://github.com/ClickHouse/ClickHouse/pull/55949) ([Azat Khuzhin](https://github.com/azat)). +* Test for Bug 43644 [#55955](https://github.com/ClickHouse/ClickHouse/pull/55955) ([Robert Schulze](https://github.com/rschu1ze)). +* Bump protobuf to v3.19.6 [#55963](https://github.com/ClickHouse/ClickHouse/pull/55963) ([Robert Schulze](https://github.com/rschu1ze)). +* Fix possible performance test error [#55964](https://github.com/ClickHouse/ClickHouse/pull/55964) ([Azat Khuzhin](https://github.com/azat)). +* Avoid counting lost parts twice [#55987](https://github.com/ClickHouse/ClickHouse/pull/55987) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Convert unnecessary std::scoped_lock usage to std::lock_guard [#56006](https://github.com/ClickHouse/ClickHouse/pull/56006) ([Robert Schulze](https://github.com/rschu1ze)). +* Stress tests: Try to wait until server is responsive after gdb detach [#56009](https://github.com/ClickHouse/ClickHouse/pull/56009) ([Raúl Marín](https://github.com/Algunenano)). +* test_storage_s3_queue - add debug info [#56011](https://github.com/ClickHouse/ClickHouse/pull/56011) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Bump protobuf to v21.9 [#56014](https://github.com/ClickHouse/ClickHouse/pull/56014) ([Robert Schulze](https://github.com/rschu1ze)). +* Correct the implementation of function `jsonMergePatch` [#56020](https://github.com/ClickHouse/ClickHouse/pull/56020) ([Anton Popov](https://github.com/CurtizJ)). +* Fix 02438_sync_replica_lightweight [#56023](https://github.com/ClickHouse/ClickHouse/pull/56023) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix some bad code by making it worse [#56026](https://github.com/ClickHouse/ClickHouse/pull/56026) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix bash completion for mawk (and update format list and add one more delimiter) [#56050](https://github.com/ClickHouse/ClickHouse/pull/56050) ([Azat Khuzhin](https://github.com/azat)). +* Analyzer: Fix crash on window resolve [#56055](https://github.com/ClickHouse/ClickHouse/pull/56055) ([Dmitry Novik](https://github.com/novikd)). +* Fix function_json_value_return_type_allow_nullable setting name in doc [#56056](https://github.com/ClickHouse/ClickHouse/pull/56056) ([vdimir](https://github.com/vdimir)). +* Force shutdown in upgrade test [#56074](https://github.com/ClickHouse/ClickHouse/pull/56074) ([Raúl Marín](https://github.com/Algunenano)). +* Try enable `01154_move_partition_long` with s3 [#56080](https://github.com/ClickHouse/ClickHouse/pull/56080) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix race condition between DROP_RANGE and committing existing block [#56083](https://github.com/ClickHouse/ClickHouse/pull/56083) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix flakiness of 02263_lazy_mark_load [#56087](https://github.com/ClickHouse/ClickHouse/pull/56087) ([Michael Kolupaev](https://github.com/al13n321)). +* Make the code less bloated [#56091](https://github.com/ClickHouse/ClickHouse/pull/56091) ([Alexey Milovidov](https://github.com/alexey-milovidov)). 
+* Maybe smaller binary [#56112](https://github.com/ClickHouse/ClickHouse/pull/56112) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Remove old trash from unit tests [#56113](https://github.com/ClickHouse/ClickHouse/pull/56113) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Remove some bloat [#56114](https://github.com/ClickHouse/ClickHouse/pull/56114) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix test_format_schema_on_server flakiness [#56116](https://github.com/ClickHouse/ClickHouse/pull/56116) ([Azat Khuzhin](https://github.com/azat)). +* Fix: schedule delayed part checks correctly [#56123](https://github.com/ClickHouse/ClickHouse/pull/56123) ([Igor Nikonov](https://github.com/devcrafter)). +* Beautify `show merges` [#56124](https://github.com/ClickHouse/ClickHouse/pull/56124) ([Denny Crane](https://github.com/den-crane)). +* Better options for disabling frame pointer omitting [#56130](https://github.com/ClickHouse/ClickHouse/pull/56130) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Fix: incorrect brace style in clang-format [#56133](https://github.com/ClickHouse/ClickHouse/pull/56133) ([Igor Nikonov](https://github.com/devcrafter)). +* Do not try to activate covered parts when handilng unexpected parts [#56137](https://github.com/ClickHouse/ClickHouse/pull/56137) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Re-fix 'Block structure mismatch' on concurrent ALTER and INSERTs in Buffer table [#56140](https://github.com/ClickHouse/ClickHouse/pull/56140) ([Michael Kolupaev](https://github.com/al13n321)). +* Update version_date.tsv and changelogs after v23.3.15.29-lts [#56145](https://github.com/ClickHouse/ClickHouse/pull/56145) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Update version_date.tsv and changelogs after v23.8.5.16-lts [#56146](https://github.com/ClickHouse/ClickHouse/pull/56146) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Update version_date.tsv and changelogs after v23.9.3.12-stable [#56147](https://github.com/ClickHouse/ClickHouse/pull/56147) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Update version_date.tsv and changelogs after v23.7.6.111-stable [#56148](https://github.com/ClickHouse/ClickHouse/pull/56148) ([robot-clickhouse](https://github.com/robot-clickhouse)). +* Fasttest timeout setting [#56160](https://github.com/ClickHouse/ClickHouse/pull/56160) ([Max K.](https://github.com/mkaynov)). +* Use monotonic clock for part check scheduling [#56162](https://github.com/ClickHouse/ClickHouse/pull/56162) ([Igor Nikonov](https://github.com/devcrafter)). +* More metrics for fs cache [#56165](https://github.com/ClickHouse/ClickHouse/pull/56165) ([Kseniia Sumarokova](https://github.com/kssenii)). +* FileCache minor changes [#56168](https://github.com/ClickHouse/ClickHouse/pull/56168) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Update 01414_mutations_and_errors_zookeeper.sh [#56176](https://github.com/ClickHouse/ClickHouse/pull/56176) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Shard fs cache keys [#56194](https://github.com/ClickHouse/ClickHouse/pull/56194) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Do less work when there lots of read requests and watches for same paths [#56197](https://github.com/ClickHouse/ClickHouse/pull/56197) ([Alexander Gololobov](https://github.com/davenger)). 
+* Remove skip_unused_shards tests from analyzer skiplist [#56200](https://github.com/ClickHouse/ClickHouse/pull/56200) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Easy tests fix for analyzer [#56211](https://github.com/ClickHouse/ClickHouse/pull/56211) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Add a warning if delayed accounting is not enabled (breaks OSIOWaitMicroseconds) [#56227](https://github.com/ClickHouse/ClickHouse/pull/56227) ([Azat Khuzhin](https://github.com/azat)). +* Do not remove part if `Too many open files` is thrown [#56238](https://github.com/ClickHouse/ClickHouse/pull/56238) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix ORC commit [#56261](https://github.com/ClickHouse/ClickHouse/pull/56261) ([Raúl Marín](https://github.com/Algunenano)). +* Fix typo in largestTriangleThreeBuckets.md [#56263](https://github.com/ClickHouse/ClickHouse/pull/56263) ([Nikita Taranov](https://github.com/nickitat)). + diff --git a/docs/changelogs/v23.10.2.13-stable.md b/docs/changelogs/v23.10.2.13-stable.md new file mode 100644 index 00000000000..4961c991047 --- /dev/null +++ b/docs/changelogs/v23.10.2.13-stable.md @@ -0,0 +1,18 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.10.2.13-stable (65d8522bb1d) FIXME as compared to v23.10.1.1976-stable (13adae0e42f) + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix a crash during table loading on startup [#56232](https://github.com/ClickHouse/ClickHouse/pull/56232) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix segfault in signal handler for Keeper [#56266](https://github.com/ClickHouse/ClickHouse/pull/56266) ([Antonio Andelic](https://github.com/antonio2368)). +* Fix incomplete query result for UNION in view() function. [#56274](https://github.com/ClickHouse/ClickHouse/pull/56274) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Fix inconsistency of "cast('0' as DateTime64(3))" and "cast('0' as Nullable(DateTime64(3)))" [#56286](https://github.com/ClickHouse/ClickHouse/pull/56286) ([李扬](https://github.com/taiyang-li)). +* Fix crash in case of adding a column with type Object(JSON) [#56307](https://github.com/ClickHouse/ClickHouse/pull/56307) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Fix buffer overflow in T64 [#56434](https://github.com/ClickHouse/ClickHouse/pull/56434) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + diff --git a/docs/changelogs/v23.10.3.5-stable.md b/docs/changelogs/v23.10.3.5-stable.md new file mode 100644 index 00000000000..2357b069cdb --- /dev/null +++ b/docs/changelogs/v23.10.3.5-stable.md @@ -0,0 +1,16 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.10.3.5-stable (b2ba7637a41) FIXME as compared to v23.10.2.13-stable (65d8522bb1d) + +#### Improvement +* Backported in [#56513](https://github.com/ClickHouse/ClickHouse/issues/56513): Allow backup of materialized view with dropped inner table instead of failing the backup. [#56387](https://github.com/ClickHouse/ClickHouse/pull/56387) ([Kseniia Sumarokova](https://github.com/kssenii)). + +#### NO CL CATEGORY + +* Backported in [#56605](https://github.com/ClickHouse/ClickHouse/issues/56605):. [#56598](https://github.com/ClickHouse/ClickHouse/pull/56598) ([Maksim Kita](https://github.com/kitaisreal)). 
+ diff --git a/docs/changelogs/v23.10.4.25-stable.md b/docs/changelogs/v23.10.4.25-stable.md new file mode 100644 index 00000000000..2d7d2a38e04 --- /dev/null +++ b/docs/changelogs/v23.10.4.25-stable.md @@ -0,0 +1,28 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.10.4.25-stable (330fd687d41) FIXME as compared to v23.10.3.5-stable (b2ba7637a41) + +#### Build/Testing/Packaging Improvement +* Backported in [#56633](https://github.com/ClickHouse/ClickHouse/issues/56633): In [#54043](https://github.com/ClickHouse/ClickHouse/issues/54043) the setup plan started to appear in the logs. It should be only in the `runner_get_all_tests.log` only. As well, send the failed infrastructure event to CI db. [#56214](https://github.com/ClickHouse/ClickHouse/pull/56214) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Backported in [#56737](https://github.com/ClickHouse/ClickHouse/issues/56737): Do not fetch changed submodules in the builder container. [#56689](https://github.com/ClickHouse/ClickHouse/pull/56689) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Select from system tables when table based on table function. [#55540](https://github.com/ClickHouse/ClickHouse/pull/55540) ([MikhailBurdukov](https://github.com/MikhailBurdukov)). +* Fix restore from backup with `flatten_nested` and `data_type_default_nullable` [#56306](https://github.com/ClickHouse/ClickHouse/pull/56306) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Fix segfault during Kerberos initialization [#56401](https://github.com/ClickHouse/ClickHouse/pull/56401) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix: RabbitMQ OpenSSL dynamic loading issue [#56703](https://github.com/ClickHouse/ClickHouse/pull/56703) ([Igor Nikonov](https://github.com/devcrafter)). +* Fix crash in GCD codec in case when zeros present in data [#56704](https://github.com/ClickHouse/ClickHouse/pull/56704) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Fix crash in FPC codec [#56795](https://github.com/ClickHouse/ClickHouse/pull/56795) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Rewrite jobs to use callable workflow [#56385](https://github.com/ClickHouse/ClickHouse/pull/56385) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Continue rewriting workflows to reusable tests [#56501](https://github.com/ClickHouse/ClickHouse/pull/56501) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Better exception messages [#56854](https://github.com/ClickHouse/ClickHouse/pull/56854) ([Antonio Andelic](https://github.com/antonio2368)). + diff --git a/docs/changelogs/v23.10.5.20-stable.md b/docs/changelogs/v23.10.5.20-stable.md new file mode 100644 index 00000000000..03e8c47481b --- /dev/null +++ b/docs/changelogs/v23.10.5.20-stable.md @@ -0,0 +1,28 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.10.5.20-stable (e84001e5c61) FIXME as compared to v23.10.4.25-stable (330fd687d41) + +#### Improvement +* Backported in [#56924](https://github.com/ClickHouse/ClickHouse/issues/56924): There was a potential vulnerability in previous ClickHouse versions: if a user has connected and unsuccessfully tried to authenticate with the "interserver secret" method, the server didn't terminate the connection immediately but continued to receive and ignore the leftover packets from the client. 
While these packets are ignored, they are still parsed, and if they use a compression method with another known vulnerability, it will lead to exploitation of it without authentication. This issue was found with [ClickHouse Bug Bounty Program](https://github.com/ClickHouse/ClickHouse/issues/38986) by https://twitter.com/malacupa. [#56794](https://github.com/ClickHouse/ClickHouse/pull/56794) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### Build/Testing/Packaging Improvement +* Backported in [#57023](https://github.com/ClickHouse/ClickHouse/issues/57023): There was an attempt to have the proper listing in [#44311](https://github.com/ClickHouse/ClickHouse/issues/44311), but the fix itself was in the wrong place, so it's still broken. See an [example](https://github.com/ClickHouse/ClickHouse/actions/runs/6897342568/job/18781001022#step:8:25). [#56989](https://github.com/ClickHouse/ClickHouse/pull/56989) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix ON CLUSTER queries without database on initial node [#56484](https://github.com/ClickHouse/ClickHouse/pull/56484) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix buffer overflow in Gorilla codec [#57107](https://github.com/ClickHouse/ClickHouse/pull/57107) ([Nikolay Degterinsky](https://github.com/evillique)). +* Close interserver connection on any exception before authentication [#57142](https://github.com/ClickHouse/ClickHouse/pull/57142) ([Antonio Andelic](https://github.com/antonio2368)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Fix client suggestions for user without grants [#56234](https://github.com/ClickHouse/ClickHouse/pull/56234) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix pygithub [#56778](https://github.com/ClickHouse/ClickHouse/pull/56778) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Avoid dependencies with no fixed versions [#56914](https://github.com/ClickHouse/ClickHouse/pull/56914) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Tiny improvement security [#57171](https://github.com/ClickHouse/ClickHouse/pull/57171) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + diff --git a/docs/changelogs/v23.3.15.29-lts.md b/docs/changelogs/v23.3.15.29-lts.md new file mode 100644 index 00000000000..e5fd5dd45a7 --- /dev/null +++ b/docs/changelogs/v23.3.15.29-lts.md @@ -0,0 +1,31 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.3.15.29-lts (218336662e4) FIXME as compared to v23.3.14.78-lts (c8f4ba52c65) + +#### Build/Testing/Packaging Improvement +* Backported in [#55671](https://github.com/ClickHouse/ClickHouse/issues/55671): If the database is already initialized, it doesn't need to be initialized again upon subsequent launches. This can potentially fix the issue of infinite container restarts when the database fails to load within 1000 attempts (relevant for very large databases and multi-node setups). [#50724](https://github.com/ClickHouse/ClickHouse/pull/50724) ([Alexander Nikolaev](https://github.com/AlexNik)). +* Backported in [#55734](https://github.com/ClickHouse/ClickHouse/issues/55734): Fix integration check python script to use gh api url - Add Readme for CI tests. [#55716](https://github.com/ClickHouse/ClickHouse/pull/55716) ([Max K.](https://github.com/mkaynov)). 
+* Backported in [#55829](https://github.com/ClickHouse/ClickHouse/issues/55829): Check sha512 for tgz; use a proper repository for keeper; write only filenames to TGZ.sha512 files for tarball packages. Prerequisite for [#31473](https://github.com/ClickHouse/ClickHouse/issues/31473). [#55717](https://github.com/ClickHouse/ClickHouse/pull/55717) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix bug with inability to drop detached partition in replicated merge tree on top of S3 without zero copy [#55309](https://github.com/ClickHouse/ClickHouse/pull/55309) ([alesapin](https://github.com/alesapin)). +* Fix crash in QueryNormalizer with cyclic aliases [#55602](https://github.com/ClickHouse/ClickHouse/pull/55602) ([vdimir](https://github.com/vdimir)). +* Fix window functions in case of sparse columns. [#55895](https://github.com/ClickHouse/ClickHouse/pull/55895) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). + +#### NO CL ENTRY + +* NO CL ENTRY: 'Pin rust version to fix GLIBC compatibility check'. [#55788](https://github.com/ClickHouse/ClickHouse/pull/55788) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Fix incorrect createColumn call on join clause [#48998](https://github.com/ClickHouse/ClickHouse/pull/48998) ([Yi Sheng](https://github.com/ongkong)). +* Use `--filter` to reduce checkout time [#54857](https://github.com/ClickHouse/ClickHouse/pull/54857) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Improve enrich image [#55793](https://github.com/ClickHouse/ClickHouse/pull/55793) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* One final leftover in diff_urls of PRInfo [#55874](https://github.com/ClickHouse/ClickHouse/pull/55874) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + diff --git a/docs/changelogs/v23.3.16.7-lts.md b/docs/changelogs/v23.3.16.7-lts.md new file mode 100644 index 00000000000..7f5aee06e0e --- /dev/null +++ b/docs/changelogs/v23.3.16.7-lts.md @@ -0,0 +1,14 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.3.16.7-lts (fb4125cc92a) FIXME as compared to v23.3.15.29-lts (218336662e4) + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix: avoid using regex match, possibly containing alternation, as a key condition. [#54696](https://github.com/ClickHouse/ClickHouse/pull/54696) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). +* Fix buffer overflow in T64 [#56434](https://github.com/ClickHouse/ClickHouse/pull/56434) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + diff --git a/docs/changelogs/v23.3.17.13-lts.md b/docs/changelogs/v23.3.17.13-lts.md new file mode 100644 index 00000000000..a18ced70d46 --- /dev/null +++ b/docs/changelogs/v23.3.17.13-lts.md @@ -0,0 +1,23 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.3.17.13-lts (e867d59020f) FIXME as compared to v23.3.16.7-lts (fb4125cc92a) + +#### Build/Testing/Packaging Improvement +* Backported in [#56731](https://github.com/ClickHouse/ClickHouse/issues/56731): Do not fetch changed submodules in the builder container. [#56689](https://github.com/ClickHouse/ClickHouse/pull/56689) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). 
+ +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix segfault during Kerberos initialization [#56401](https://github.com/ClickHouse/ClickHouse/pull/56401) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix crash in FPC codec [#56795](https://github.com/ClickHouse/ClickHouse/pull/56795) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Rewrite jobs to use callable workflow [#56385](https://github.com/ClickHouse/ClickHouse/pull/56385) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Continue rewriting workflows to reusable tests [#56501](https://github.com/ClickHouse/ClickHouse/pull/56501) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Better exception messages [#56854](https://github.com/ClickHouse/ClickHouse/pull/56854) ([Antonio Andelic](https://github.com/antonio2368)). + diff --git a/docs/changelogs/v23.3.18.15-lts.md b/docs/changelogs/v23.3.18.15-lts.md new file mode 100644 index 00000000000..3bf993a0960 --- /dev/null +++ b/docs/changelogs/v23.3.18.15-lts.md @@ -0,0 +1,26 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.3.18.15-lts (7228475d77a) FIXME as compared to v23.3.17.13-lts (e867d59020f) + +#### Improvement +* Backported in [#56928](https://github.com/ClickHouse/ClickHouse/issues/56928): There was a potential vulnerability in previous ClickHouse versions: if a user has connected and unsuccessfully tried to authenticate with the "interserver secret" method, the server didn't terminate the connection immediately but continued to receive and ignore the leftover packets from the client. While these packets are ignored, they are still parsed, and if they use a compression method with another known vulnerability, it will lead to exploitation of it without authentication. This issue was found with [ClickHouse Bug Bounty Program](https://github.com/ClickHouse/ClickHouse/issues/38986) by https://twitter.com/malacupa. [#56794](https://github.com/ClickHouse/ClickHouse/pull/56794) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### Build/Testing/Packaging Improvement +* Backported in [#57019](https://github.com/ClickHouse/ClickHouse/issues/57019): There was an attempt to have the proper listing in [#44311](https://github.com/ClickHouse/ClickHouse/issues/44311), but the fix itself was in the wrong place, so it's still broken. See an [example](https://github.com/ClickHouse/ClickHouse/actions/runs/6897342568/job/18781001022#step:8:25). [#56989](https://github.com/ClickHouse/ClickHouse/pull/56989) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix buffer overflow in Gorilla codec [#57107](https://github.com/ClickHouse/ClickHouse/pull/57107) ([Nikolay Degterinsky](https://github.com/evillique)). +* Close interserver connection on any exception before authentication [#57142](https://github.com/ClickHouse/ClickHouse/pull/57142) ([Antonio Andelic](https://github.com/antonio2368)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Fix pygithub [#56778](https://github.com/ClickHouse/ClickHouse/pull/56778) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Avoid dependencies with no fixed versions [#56914](https://github.com/ClickHouse/ClickHouse/pull/56914) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Tiny improvement security [#57171](https://github.com/ClickHouse/ClickHouse/pull/57171) ([Mikhail f. 
Shiryaev](https://github.com/Felixoid)). + diff --git a/docs/changelogs/v23.7.6.111-stable.md b/docs/changelogs/v23.7.6.111-stable.md new file mode 100644 index 00000000000..e00c55a8cca --- /dev/null +++ b/docs/changelogs/v23.7.6.111-stable.md @@ -0,0 +1,78 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.7.6.111-stable (6b047a47504) FIXME as compared to v23.7.5.30-stable (e86c21fb922) + +#### Improvement +* Backported in [#54285](https://github.com/ClickHouse/ClickHouse/issues/54285): Enable allow_remove_stale_moving_parts by default. [#54260](https://github.com/ClickHouse/ClickHouse/pull/54260) ([vdimir](https://github.com/vdimir)). + +#### Build/Testing/Packaging Improvement +* Backported in [#55291](https://github.com/ClickHouse/ClickHouse/issues/55291): Resource with source code including submodules is built in Darwin special build task. It may be used to build ClickHouse without checkouting submodules. [#51435](https://github.com/ClickHouse/ClickHouse/pull/51435) ([Ilya Yatsishin](https://github.com/qoega)). +* Backported in [#54705](https://github.com/ClickHouse/ClickHouse/issues/54705): Enrich `changed_images.json` with the latest tag from master for images that are not changed in the pull request. [#54369](https://github.com/ClickHouse/ClickHouse/pull/54369) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). +* Backported in [#54683](https://github.com/ClickHouse/ClickHouse/issues/54683): We build and upload them for every push, which isn't worth it. [#54675](https://github.com/ClickHouse/ClickHouse/pull/54675) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Backported in [#55407](https://github.com/ClickHouse/ClickHouse/issues/55407): Solve issue with launching standalone clickhouse-keeper from clickhouse-server package. [#55226](https://github.com/ClickHouse/ClickHouse/pull/55226) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Backported in [#55723](https://github.com/ClickHouse/ClickHouse/issues/55723): Fix integration check python script to use gh api url - Add Readme for CI tests. [#55716](https://github.com/ClickHouse/ClickHouse/pull/55716) ([Max K.](https://github.com/mkaynov)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix recalculation of skip indexes and projections in `ALTER DELETE` queries [#52530](https://github.com/ClickHouse/ClickHouse/pull/52530) ([Anton Popov](https://github.com/CurtizJ)). +* RFC: Fix filtering by virtual columns with OR expression [#52653](https://github.com/ClickHouse/ClickHouse/pull/52653) ([Azat Khuzhin](https://github.com/azat)). +* Fix reading of unnecessary column in case of multistage `PREWHERE` [#52689](https://github.com/ClickHouse/ClickHouse/pull/52689) ([Anton Popov](https://github.com/CurtizJ)). +* Fix sorting of sparse columns with large limit [#52827](https://github.com/ClickHouse/ClickHouse/pull/52827) ([Anton Popov](https://github.com/CurtizJ)). +* Fix reading of empty `Nested(Array(LowCardinality(...)))` [#52949](https://github.com/ClickHouse/ClickHouse/pull/52949) ([Anton Popov](https://github.com/CurtizJ)). +* Fix adding sub-second intervals to DateTime [#53309](https://github.com/ClickHouse/ClickHouse/pull/53309) ([Michael Kolupaev](https://github.com/al13n321)). +* Fix: moved to prewhere condition actions can lose column [#53492](https://github.com/ClickHouse/ClickHouse/pull/53492) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). 
+* Fix crash in join on sparse column [#53548](https://github.com/ClickHouse/ClickHouse/pull/53548) ([vdimir](https://github.com/vdimir)). +* Fix named_collection_admin alias [#54066](https://github.com/ClickHouse/ClickHouse/pull/54066) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Fix rows_before_limit_at_least for DelayedSource. [#54122](https://github.com/ClickHouse/ClickHouse/pull/54122) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Fix: allow IPv6 for bloom filter [#54200](https://github.com/ClickHouse/ClickHouse/pull/54200) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). +* Check for overflow before addition in `analysisOfVariance` function [#54385](https://github.com/ClickHouse/ClickHouse/pull/54385) ([Antonio Andelic](https://github.com/antonio2368)). +* reproduce and fix the bug in removeSharedRecursive [#54430](https://github.com/ClickHouse/ClickHouse/pull/54430) ([Sema Checherinda](https://github.com/CheSema)). +* Fix aggregate projections with normalized states [#54480](https://github.com/ClickHouse/ClickHouse/pull/54480) ([Amos Bird](https://github.com/amosbird)). +* Fix possible parsing error in WithNames formats with disabled input_format_with_names_use_header [#54513](https://github.com/ClickHouse/ClickHouse/pull/54513) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix rare case of CHECKSUM_DOESNT_MATCH error [#54549](https://github.com/ClickHouse/ClickHouse/pull/54549) ([alesapin](https://github.com/alesapin)). +* Fix zero copy garbage [#54550](https://github.com/ClickHouse/ClickHouse/pull/54550) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix "Invalid number of rows in Chunk" in MaterializedPostgreSQL [#54844](https://github.com/ClickHouse/ClickHouse/pull/54844) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Prevent attaching parts from tables with different projections or indices [#55062](https://github.com/ClickHouse/ClickHouse/pull/55062) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). +* Fix deadlock in LDAP assigned role update [#55119](https://github.com/ClickHouse/ClickHouse/pull/55119) ([Julian Maicher](https://github.com/jmaicher)). +* Fix storage Iceberg files retrieval [#55144](https://github.com/ClickHouse/ClickHouse/pull/55144) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Fix functions execution over sparse columns [#55275](https://github.com/ClickHouse/ClickHouse/pull/55275) ([Azat Khuzhin](https://github.com/azat)). +* Fix bug with inability to drop detached partition in replicated merge tree on top of S3 without zero copy [#55309](https://github.com/ClickHouse/ClickHouse/pull/55309) ([alesapin](https://github.com/alesapin)). +* Fix trash optimization (up to a certain extent) [#55353](https://github.com/ClickHouse/ClickHouse/pull/55353) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Fix parsing of arrays in cast operator [#55417](https://github.com/ClickHouse/ClickHouse/pull/55417) ([Anton Popov](https://github.com/CurtizJ)). +* Fix filtering by virtual columns with OR filter in query [#55418](https://github.com/ClickHouse/ClickHouse/pull/55418) ([Azat Khuzhin](https://github.com/azat)). +* Fix MongoDB connection issues [#55419](https://github.com/ClickHouse/ClickHouse/pull/55419) ([Nikolay Degterinsky](https://github.com/evillique)). +* Destroy fiber in case of exception in cancelBefore in AsyncTaskExecutor [#55516](https://github.com/ClickHouse/ClickHouse/pull/55516) ([Kruglov Pavel](https://github.com/Avogar)). 
+* Fix crash in QueryNormalizer with cyclic aliases [#55602](https://github.com/ClickHouse/ClickHouse/pull/55602) ([vdimir](https://github.com/vdimir)). +* Fix filtering by virtual columns with OR filter in query (resubmit) [#55678](https://github.com/ClickHouse/ClickHouse/pull/55678) ([Azat Khuzhin](https://github.com/azat)). +* Fix window functions in case of sparse columns. [#55895](https://github.com/ClickHouse/ClickHouse/pull/55895) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). + +#### NO CL CATEGORY + +* Backported in [#55704](https://github.com/ClickHouse/ClickHouse/issues/55704):. [#55657](https://github.com/ClickHouse/ClickHouse/pull/55657) ([Antonio Andelic](https://github.com/antonio2368)). + +#### NO CL ENTRY + +* NO CL ENTRY: 'Revert "Merge pull request [#52395](https://github.com/ClickHouse/ClickHouse/issues/52395) from azat/rust/reproducible-builds"'. [#55517](https://github.com/ClickHouse/ClickHouse/pull/55517) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Test libunwind changes. [#51436](https://github.com/ClickHouse/ClickHouse/pull/51436) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Retry blob listing in test_alter_moving_garbage [#52193](https://github.com/ClickHouse/ClickHouse/pull/52193) ([vdimir](https://github.com/vdimir)). +* add tests with connection reset by peer error, and retry it inside client [#52441](https://github.com/ClickHouse/ClickHouse/pull/52441) ([Sema Checherinda](https://github.com/CheSema)). +* Small fix for HTTPHeaderFilter [#53146](https://github.com/ClickHouse/ClickHouse/pull/53146) ([San](https://github.com/santrancisco)). +* fix Logical Error in AsynchronousBoundedReadBuffer [#53651](https://github.com/ClickHouse/ClickHouse/pull/53651) ([Sema Checherinda](https://github.com/CheSema)). +* Replace dlcdn.apache.org by archive domain [#54081](https://github.com/ClickHouse/ClickHouse/pull/54081) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Fix segfault in system.zookeeper [#54326](https://github.com/ClickHouse/ClickHouse/pull/54326) ([Alexander Tokmakov](https://github.com/tavplubix)). +* Fix CI skip build and skip tests checks [#54532](https://github.com/ClickHouse/ClickHouse/pull/54532) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)). +* Update WebObjectStorage.cpp [#54695](https://github.com/ClickHouse/ClickHouse/pull/54695) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Use `--filter` to reduce checkout time [#54857](https://github.com/ClickHouse/ClickHouse/pull/54857) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* check if block is empty after async insert retries [#55143](https://github.com/ClickHouse/ClickHouse/pull/55143) ([Han Fei](https://github.com/hanfei1991)). +* MaterializedPostgreSQL: remove back check [#55297](https://github.com/ClickHouse/ClickHouse/pull/55297) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Remove existing moving/ dir if allow_remove_stale_moving_parts is off [#55480](https://github.com/ClickHouse/ClickHouse/pull/55480) ([Mike Kot](https://github.com/myrrc)). +* One final leftover in diff_urls of PRInfo [#55874](https://github.com/ClickHouse/ClickHouse/pull/55874) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). 
+ diff --git a/docs/changelogs/v23.8.5.16-lts.md b/docs/changelogs/v23.8.5.16-lts.md new file mode 100644 index 00000000000..4a23b8892be --- /dev/null +++ b/docs/changelogs/v23.8.5.16-lts.md @@ -0,0 +1,24 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.8.5.16-lts (e8a1af5fe2f) FIXME as compared to v23.8.4.69-lts (d4d1e7b9ded) + +#### Build/Testing/Packaging Improvement +* Backported in [#55830](https://github.com/ClickHouse/ClickHouse/issues/55830): Check sha512 for tgz; use a proper repository for keeper; write only filenames to TGZ.sha512 files for tarball packages. Prerequisite for [#31473](https://github.com/ClickHouse/ClickHouse/issues/31473). [#55717](https://github.com/ClickHouse/ClickHouse/pull/55717) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix storage Iceberg files retrieval [#55144](https://github.com/ClickHouse/ClickHouse/pull/55144) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Try to fix possible segfault in Native ORC input format [#55891](https://github.com/ClickHouse/ClickHouse/pull/55891) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix window functions in case of sparse columns. [#55895](https://github.com/ClickHouse/ClickHouse/pull/55895) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Use `--filter` to reduce checkout time [#54857](https://github.com/ClickHouse/ClickHouse/pull/54857) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* One final leftover in diff_urls of PRInfo [#55874](https://github.com/ClickHouse/ClickHouse/pull/55874) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Bring relevant commits from backport/23.8/55336 to 23.8 [#56029](https://github.com/ClickHouse/ClickHouse/pull/56029) ([Austin Kothig](https://github.com/kothiga)). + diff --git a/docs/changelogs/v23.8.6.16-lts.md b/docs/changelogs/v23.8.6.16-lts.md new file mode 100644 index 00000000000..6eb752e987c --- /dev/null +++ b/docs/changelogs/v23.8.6.16-lts.md @@ -0,0 +1,21 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.8.6.16-lts (077df679bed) FIXME as compared to v23.8.5.16-lts (e8a1af5fe2f) + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix rare case of CHECKSUM_DOESNT_MATCH error [#54549](https://github.com/ClickHouse/ClickHouse/pull/54549) ([alesapin](https://github.com/alesapin)). +* Fix: avoid using regex match, possibly containing alternation, as a key condition. [#54696](https://github.com/ClickHouse/ClickHouse/pull/54696) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)). +* Fix a crash during table loading on startup [#56232](https://github.com/ClickHouse/ClickHouse/pull/56232) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix segfault in signal handler for Keeper [#56266](https://github.com/ClickHouse/ClickHouse/pull/56266) ([Antonio Andelic](https://github.com/antonio2368)). +* Fix buffer overflow in T64 [#56434](https://github.com/ClickHouse/ClickHouse/pull/56434) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Improve enrich image [#55793](https://github.com/ClickHouse/ClickHouse/pull/55793) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). 
+ diff --git a/docs/changelogs/v23.8.7.24-lts.md b/docs/changelogs/v23.8.7.24-lts.md new file mode 100644 index 00000000000..37862c17315 --- /dev/null +++ b/docs/changelogs/v23.8.7.24-lts.md @@ -0,0 +1,31 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.8.7.24-lts (812b95e14ba) FIXME as compared to v23.8.6.16-lts (077df679bed) + +#### Build/Testing/Packaging Improvement +* Backported in [#56733](https://github.com/ClickHouse/ClickHouse/issues/56733): Do not fetch changed submodules in the builder container. [#56689](https://github.com/ClickHouse/ClickHouse/pull/56689) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Select from system tables when table based on table function. [#55540](https://github.com/ClickHouse/ClickHouse/pull/55540) ([MikhailBurdukov](https://github.com/MikhailBurdukov)). +* Fix incomplete query result for UNION in view() function. [#56274](https://github.com/ClickHouse/ClickHouse/pull/56274) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Fix crash in case of adding a column with type Object(JSON) [#56307](https://github.com/ClickHouse/ClickHouse/pull/56307) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Fix segfault during Kerberos initialization [#56401](https://github.com/ClickHouse/ClickHouse/pull/56401) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix: RabbitMQ OpenSSL dynamic loading issue [#56703](https://github.com/ClickHouse/ClickHouse/pull/56703) ([Igor Nikonov](https://github.com/devcrafter)). +* Fix crash in FPC codec [#56795](https://github.com/ClickHouse/ClickHouse/pull/56795) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### NO CL CATEGORY + +* Backported in [#56601](https://github.com/ClickHouse/ClickHouse/issues/56601):. [#56598](https://github.com/ClickHouse/ClickHouse/pull/56598) ([Maksim Kita](https://github.com/kitaisreal)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Rewrite jobs to use callable workflow [#56385](https://github.com/ClickHouse/ClickHouse/pull/56385) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Continue rewriting workflows to reusable tests [#56501](https://github.com/ClickHouse/ClickHouse/pull/56501) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Better exception messages [#56854](https://github.com/ClickHouse/ClickHouse/pull/56854) ([Antonio Andelic](https://github.com/antonio2368)). + diff --git a/docs/changelogs/v23.8.8.20-lts.md b/docs/changelogs/v23.8.8.20-lts.md new file mode 100644 index 00000000000..345cfcccf17 --- /dev/null +++ b/docs/changelogs/v23.8.8.20-lts.md @@ -0,0 +1,28 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.8.8.20-lts (5e012a03bf2) FIXME as compared to v23.8.7.24-lts (812b95e14ba) + +#### Improvement +* Backported in [#56509](https://github.com/ClickHouse/ClickHouse/issues/56509): Allow backup of materialized view with dropped inner table instead of failing the backup. [#56387](https://github.com/ClickHouse/ClickHouse/pull/56387) ([Kseniia Sumarokova](https://github.com/kssenii)). 
+* Backported in [#56929](https://github.com/ClickHouse/ClickHouse/issues/56929): There was a potential vulnerability in previous ClickHouse versions: if a user has connected and unsuccessfully tried to authenticate with the "interserver secret" method, the server didn't terminate the connection immediately but continued to receive and ignore the leftover packets from the client. While these packets are ignored, they are still parsed, and if they use a compression method with another known vulnerability, it will lead to exploitation of it without authentication. This issue was found with [ClickHouse Bug Bounty Program](https://github.com/ClickHouse/ClickHouse/issues/38986) by https://twitter.com/malacupa. [#56794](https://github.com/ClickHouse/ClickHouse/pull/56794) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### Build/Testing/Packaging Improvement +* Backported in [#57020](https://github.com/ClickHouse/ClickHouse/issues/57020): There was an attempt to have the proper listing in [#44311](https://github.com/ClickHouse/ClickHouse/issues/44311), but the fix itself was in the wrong place, so it's still broken. See an [example](https://github.com/ClickHouse/ClickHouse/actions/runs/6897342568/job/18781001022#step:8:25). [#56989](https://github.com/ClickHouse/ClickHouse/pull/56989) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix ON CLUSTER queries without database on initial node [#56484](https://github.com/ClickHouse/ClickHouse/pull/56484) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix buffer overflow in Gorilla codec [#57107](https://github.com/ClickHouse/ClickHouse/pull/57107) ([Nikolay Degterinsky](https://github.com/evillique)). +* Close interserver connection on any exception before authentication [#57142](https://github.com/ClickHouse/ClickHouse/pull/57142) ([Antonio Andelic](https://github.com/antonio2368)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Fix pygithub [#56778](https://github.com/ClickHouse/ClickHouse/pull/56778) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Avoid dependencies with no fixed versions [#56914](https://github.com/ClickHouse/ClickHouse/pull/56914) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Tiny improvement security [#57171](https://github.com/ClickHouse/ClickHouse/pull/57171) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + diff --git a/docs/changelogs/v23.9.3.12-stable.md b/docs/changelogs/v23.9.3.12-stable.md new file mode 100644 index 00000000000..0d2a12fdf92 --- /dev/null +++ b/docs/changelogs/v23.9.3.12-stable.md @@ -0,0 +1,20 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.9.3.12-stable (b7230b06563) FIXME as compared to v23.9.2.56-stable (a1bf3f1de55) + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix storage Iceberg files retrieval [#55144](https://github.com/ClickHouse/ClickHouse/pull/55144) ([Kseniia Sumarokova](https://github.com/kssenii)). +* Try to fix possible segfault in Native ORC input format [#55891](https://github.com/ClickHouse/ClickHouse/pull/55891) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix window functions in case of sparse columns. [#55895](https://github.com/ClickHouse/ClickHouse/pull/55895) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)). 
+ +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Use `--filter` to reduce checkout time [#54857](https://github.com/ClickHouse/ClickHouse/pull/54857) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* One final leftover in diff_urls of PRInfo [#55874](https://github.com/ClickHouse/ClickHouse/pull/55874) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + diff --git a/docs/changelogs/v23.9.4.11-stable.md b/docs/changelogs/v23.9.4.11-stable.md new file mode 100644 index 00000000000..a5d100ea606 --- /dev/null +++ b/docs/changelogs/v23.9.4.11-stable.md @@ -0,0 +1,17 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.9.4.11-stable (74c1f49dd6a) FIXME as compared to v23.9.3.12-stable (b7230b06563) + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix wrong query result when http_write_exception_in_output_format=1 [#56135](https://github.com/ClickHouse/ClickHouse/pull/56135) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix schema cache for fallback JSON->JSONEachRow with changed settings [#56172](https://github.com/ClickHouse/ClickHouse/pull/56172) ([Kruglov Pavel](https://github.com/Avogar)). +* Fix a crash during table loading on startup [#56232](https://github.com/ClickHouse/ClickHouse/pull/56232) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix segfault in signal handler for Keeper [#56266](https://github.com/ClickHouse/ClickHouse/pull/56266) ([Antonio Andelic](https://github.com/antonio2368)). +* Fix buffer overflow in T64 [#56434](https://github.com/ClickHouse/ClickHouse/pull/56434) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + diff --git a/docs/changelogs/v23.9.5.29-stable.md b/docs/changelogs/v23.9.5.29-stable.md new file mode 100644 index 00000000000..02572d0e562 --- /dev/null +++ b/docs/changelogs/v23.9.5.29-stable.md @@ -0,0 +1,34 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.9.5.29-stable (f8554c1a1ff) FIXME as compared to v23.9.4.11-stable (74c1f49dd6a) + +#### Build/Testing/Packaging Improvement +* Backported in [#56631](https://github.com/ClickHouse/ClickHouse/issues/56631): In [#54043](https://github.com/ClickHouse/ClickHouse/issues/54043) the setup plan started to appear in the logs. It should be only in the `runner_get_all_tests.log` only. As well, send the failed infrastructure event to CI db. [#56214](https://github.com/ClickHouse/ClickHouse/pull/56214) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Backported in [#56735](https://github.com/ClickHouse/ClickHouse/issues/56735): Do not fetch changed submodules in the builder container. [#56689](https://github.com/ClickHouse/ClickHouse/pull/56689) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Select from system tables when table based on table function. [#55540](https://github.com/ClickHouse/ClickHouse/pull/55540) ([MikhailBurdukov](https://github.com/MikhailBurdukov)). +* Fix incomplete query result for UNION in view() function. [#56274](https://github.com/ClickHouse/ClickHouse/pull/56274) ([Nikolai Kochetov](https://github.com/KochetovNicolai)). +* Fix crash in case of adding a column with type Object(JSON) [#56307](https://github.com/ClickHouse/ClickHouse/pull/56307) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). 
+* Fix segfault during Kerberos initialization [#56401](https://github.com/ClickHouse/ClickHouse/pull/56401) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix: RabbitMQ OpenSSL dynamic loading issue [#56703](https://github.com/ClickHouse/ClickHouse/pull/56703) ([Igor Nikonov](https://github.com/devcrafter)). +* Fix crash in GCD codec in case when zeros present in data [#56704](https://github.com/ClickHouse/ClickHouse/pull/56704) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). +* Fix crash in FPC codec [#56795](https://github.com/ClickHouse/ClickHouse/pull/56795) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### NO CL CATEGORY + +* Backported in [#56603](https://github.com/ClickHouse/ClickHouse/issues/56603):. [#56598](https://github.com/ClickHouse/ClickHouse/pull/56598) ([Maksim Kita](https://github.com/kitaisreal)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Improve enrich image [#55793](https://github.com/ClickHouse/ClickHouse/pull/55793) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Rewrite jobs to use callable workflow [#56385](https://github.com/ClickHouse/ClickHouse/pull/56385) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Continue rewriting workflows to reusable tests [#56501](https://github.com/ClickHouse/ClickHouse/pull/56501) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Better exception messages [#56854](https://github.com/ClickHouse/ClickHouse/pull/56854) ([Antonio Andelic](https://github.com/antonio2368)). + diff --git a/docs/changelogs/v23.9.6.20-stable.md b/docs/changelogs/v23.9.6.20-stable.md new file mode 100644 index 00000000000..b4aed625fea --- /dev/null +++ b/docs/changelogs/v23.9.6.20-stable.md @@ -0,0 +1,28 @@ +--- +sidebar_position: 1 +sidebar_label: 2023 +--- + +# 2023 Changelog + +### ClickHouse release v23.9.6.20-stable (cf7e84bb8cf) FIXME as compared to v23.9.5.29-stable (f8554c1a1ff) + +#### Improvement +* Backported in [#56930](https://github.com/ClickHouse/ClickHouse/issues/56930): There was a potential vulnerability in previous ClickHouse versions: if a user has connected and unsuccessfully tried to authenticate with the "interserver secret" method, the server didn't terminate the connection immediately but continued to receive and ignore the leftover packets from the client. While these packets are ignored, they are still parsed, and if they use a compression method with another known vulnerability, it will lead to exploitation of it without authentication. This issue was found with [ClickHouse Bug Bounty Program](https://github.com/ClickHouse/ClickHouse/issues/38986) by https://twitter.com/malacupa. [#56794](https://github.com/ClickHouse/ClickHouse/pull/56794) ([Alexey Milovidov](https://github.com/alexey-milovidov)). + +#### Build/Testing/Packaging Improvement +* Backported in [#57022](https://github.com/ClickHouse/ClickHouse/issues/57022): There was an attempt to have the proper listing in [#44311](https://github.com/ClickHouse/ClickHouse/issues/44311), but the fix itself was in the wrong place, so it's still broken. See an [example](https://github.com/ClickHouse/ClickHouse/actions/runs/6897342568/job/18781001022#step:8:25). [#56989](https://github.com/ClickHouse/ClickHouse/pull/56989) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). 
+ +#### Bug Fix (user-visible misbehavior in an official stable release) + +* Fix ON CLUSTER queries without database on initial node [#56484](https://github.com/ClickHouse/ClickHouse/pull/56484) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix buffer overflow in Gorilla codec [#57107](https://github.com/ClickHouse/ClickHouse/pull/57107) ([Nikolay Degterinsky](https://github.com/evillique)). +* Close interserver connection on any exception before authentication [#57142](https://github.com/ClickHouse/ClickHouse/pull/57142) ([Antonio Andelic](https://github.com/antonio2368)). + +#### NOT FOR CHANGELOG / INSIGNIFICANT + +* Fix client suggestions for user without grants [#56234](https://github.com/ClickHouse/ClickHouse/pull/56234) ([Nikolay Degterinsky](https://github.com/evillique)). +* Fix pygithub [#56778](https://github.com/ClickHouse/ClickHouse/pull/56778) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). +* Avoid dependencies with no fixed versions [#56914](https://github.com/ClickHouse/ClickHouse/pull/56914) ([Alexey Milovidov](https://github.com/alexey-milovidov)). +* Tiny improvement security [#57171](https://github.com/ClickHouse/ClickHouse/pull/57171) ([Mikhail f. Shiryaev](https://github.com/Felixoid)). + diff --git a/docs/en/development/architecture.md b/docs/en/development/architecture.md index ba81b31b8ef..cfdd2bbcc41 100644 --- a/docs/en/development/architecture.md +++ b/docs/en/development/architecture.md @@ -67,22 +67,30 @@ Implementations of `ReadBuffer`/`WriteBuffer` are used for working with files an Read/WriteBuffers only deal with bytes. There are functions from `ReadHelpers` and `WriteHelpers` header files to help with formatting input/output. For example, there are helpers to write a number in decimal format. -Let’s look at what happens when you want to write a result set in `JSON` format to stdout. You have a result set ready to be fetched from `IBlockInputStream`. You create `WriteBufferFromFileDescriptor(STDOUT_FILENO)` to write bytes to stdout. You create `JSONRowOutputStream`, initialized with that `WriteBuffer`, to write rows in `JSON` to stdout. You create `BlockOutputStreamFromRowOutputStream` on top of it, to represent it as `IBlockOutputStream`. Then you call `copyData` to transfer data from `IBlockInputStream` to `IBlockOutputStream`, and everything works. Internally, `JSONRowOutputStream` will write various JSON delimiters and call the `IDataType::serializeTextJSON` method with a reference to `IColumn` and the row number as arguments. Consequently, `IDataType::serializeTextJSON` will call a method from `WriteHelpers.h`: for example, `writeText` for numeric types and `writeJSONString` for `DataTypeString`. +Let's examine what happens when you want to write a result set in `JSON` format to stdout. +You have a result set ready to be fetched from a pulling `QueryPipeline`. +First, you create a `WriteBufferFromFileDescriptor(STDOUT_FILENO)` to write bytes to stdout. +Next, you connect the result from the query pipeline to `JSONRowOutputFormat`, which is initialized with that `WriteBuffer`, to write rows in `JSON` format to stdout. +This can be done via the `complete` method, which turns a pulling `QueryPipeline` into a completed `QueryPipeline`. +Internally, `JSONRowOutputFormat` will write various JSON delimiters and call the `IDataType::serializeTextJSON` method with a reference to `IColumn` and the row number as arguments. 
Consequently, `IDataType::serializeTextJSON` will call a method from `WriteHelpers.h`: for example, `writeText` for numeric types and `writeJSONString` for `DataTypeString`. ## Tables {#tables} The `IStorage` interface represents tables. Different implementations of that interface are different table engines. Examples are `StorageMergeTree`, `StorageMemory`, and so on. Instances of these classes are just tables. -The key `IStorage` methods are `read` and `write`. There are also `alter`, `rename`, `drop`, and so on. The `read` method accepts the following arguments: the set of columns to read from a table, the `AST` query to consider, and the desired number of streams to return. It returns one or multiple `IBlockInputStream` objects and information about the stage of data processing that was completed inside a table engine during query execution. +The key methods in `IStorage` are `read` and `write`, along with others such as `alter`, `rename`, and `drop`. The `read` method accepts the following arguments: a set of columns to read from a table, the `AST` query to consider, and the desired number of streams. It returns a `Pipe`. -In most cases, the read method is only responsible for reading the specified columns from a table, not for any further data processing. All further data processing is done by the query interpreter and is outside the responsibility of `IStorage`. +In most cases, the read method is responsible only for reading the specified columns from a table, not for any further data processing. +All subsequent data processing is handled by another part of the pipeline, which falls outside the responsibility of `IStorage`. But there are notable exceptions: - The AST query is passed to the `read` method, and the table engine can use it to derive index usage and to read fewer data from a table. - Sometimes the table engine can process data itself to a specific stage. For example, `StorageDistributed` can send a query to remote servers, ask them to process data to a stage where data from different remote servers can be merged, and return that preprocessed data. The query interpreter then finishes processing the data. -The table’s `read` method can return multiple `IBlockInputStream` objects to allow parallel data processing. These multiple block input streams can read from a table in parallel. Then you can wrap these streams with various transformations (such as expression evaluation or filtering) that can be calculated independently and create a `UnionBlockInputStream` on top of them, to read from multiple streams in parallel. +The table’s `read` method can return a `Pipe` consisting of multiple `Processors`. These `Processors` can read from a table in parallel. +Then, you can connect these processors with various other transformations (such as expression evaluation or filtering), which can be calculated independently. +And then, create a `QueryPipeline` on top of them, and execute it via `PipelineExecutor`. There are also `TableFunction`s. These are functions that return a temporary `IStorage` object to use in the `FROM` clause of a query. @@ -98,9 +106,19 @@ A hand-written recursive descent parser parses a query. For example, `ParserSele ## Interpreters {#interpreters} -Interpreters are responsible for creating the query execution pipeline from an `AST`. There are simple interpreters, such as `InterpreterExistsQuery` and `InterpreterDropQuery`, or the more sophisticated `InterpreterSelectQuery`. The query execution pipeline is a combination of block input or output streams. 
For example, the result of interpreting the `SELECT` query is the `IBlockInputStream` to read the result set from; the result of the `INSERT` query is the `IBlockOutputStream` to write data for insertion to, and the result of interpreting the `INSERT SELECT` query is the `IBlockInputStream` that returns an empty result set on the first read, but that copies data from `SELECT` to `INSERT` at the same time. +Interpreters are responsible for creating the query execution pipeline from an AST. There are simple interpreters, such as `InterpreterExistsQuery` and `InterpreterDropQuery`, as well as the more sophisticated `InterpreterSelectQuery`. -`InterpreterSelectQuery` uses `ExpressionAnalyzer` and `ExpressionActions` machinery for query analysis and transformations. This is where most rule-based query optimizations are done. `ExpressionAnalyzer` is quite messy and should be rewritten: various query transformations and optimizations should be extracted to separate classes to allow modular transformations of query. +The query execution pipeline is a combination of processors that can consume and produce chunks (sets of columns with specific types). +A processor communicates via ports and can have multiple input ports and multiple output ports. +A more detailed description can be found in [src/Processors/IProcessor.h](https://github.com/ClickHouse/ClickHouse/blob/master/src/Processors/IProcessor.h). + +For example, the result of interpreting the `SELECT` query is a "pulling" `QueryPipeline` which has a special output port to read the result set from. +The result of the `INSERT` query is a "pushing" `QueryPipeline` with an input port to write data for insertion. +And the result of interpreting the `INSERT SELECT` query is a "completed" `QueryPipeline` that has no inputs or outputs but copies data from `SELECT` to `INSERT` simultaneously. + +`InterpreterSelectQuery` uses `ExpressionAnalyzer` and `ExpressionActions` machinery for query analysis and transformations. This is where most rule-based query optimizations are performed. `ExpressionAnalyzer` is quite messy and should be rewritten: various query transformations and optimizations should be extracted into separate classes to allow for modular transformations of the query. + +To address current problems that exist in interpreters, a new `InterpreterSelectQueryAnalyzer` is being developed. It is a new version of `InterpreterSelectQuery` that does not use `ExpressionAnalyzer` and introduces an additional abstraction level between `AST` and `QueryPipeline` called `QueryTree`. It is not production-ready yet, but it can be tested with the `allow_experimental_analyzer` flag. ## Functions {#functions} diff --git a/docs/en/development/build-cross-osx.md b/docs/en/development/build-cross-osx.md index b70fc36e28e..a04d676e92d 100644 --- a/docs/en/development/build-cross-osx.md +++ b/docs/en/development/build-cross-osx.md @@ -11,6 +11,8 @@ This is intended for continuous integration checks that run on Linux servers. If The cross-build for macOS is based on the [Build instructions](../development/build.md), follow them first. +The following sections provide a walk-through for building ClickHouse for `x86_64` macOS. If you’re targeting ARM architecture, simply substitute all occurrences of `x86_64` with `aarch64`. For example, replace `x86_64-apple-darwin` with `aarch64-apple-darwin` throughout the steps. + ## Install Clang-17 Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup. 
@@ -30,13 +32,13 @@ export CCTOOLS=$(cd ~/cctools && pwd) mkdir ${CCTOOLS} cd ${CCTOOLS} -git clone https://github.com/tpoechtrager/apple-libtapi.git +git clone --depth=1 https://github.com/tpoechtrager/apple-libtapi.git cd apple-libtapi INSTALLPREFIX=${CCTOOLS} ./build.sh ./install.sh cd .. -git clone https://github.com/tpoechtrager/cctools-port.git +git clone --depth=1 https://github.com/tpoechtrager/cctools-port.git cd cctools-port/cctools ./configure --prefix=$(readlink -f ${CCTOOLS}) --with-libtapi=$(readlink -f ${CCTOOLS}) --target=x86_64-apple-darwin make install @@ -46,7 +48,7 @@ Also, we need to download macOS X SDK into the working tree. ``` bash cd ClickHouse/cmake/toolchain/darwin-x86_64 -curl -L 'https://github.com/phracker/MacOSX-SDKs/releases/download/10.15/MacOSX10.15.sdk.tar.xz' | tar xJ --strip-components=1 +curl -L 'https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.0.sdk.tar.xz' | tar xJ --strip-components=1 ``` ## Build ClickHouse {#build-clickhouse} diff --git a/docs/en/development/build-cross-riscv.md b/docs/en/development/build-cross-riscv.md index c21353f7f73..9ee5346f258 100644 --- a/docs/en/development/build-cross-riscv.md +++ b/docs/en/development/build-cross-riscv.md @@ -23,7 +23,7 @@ sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ``` bash cd ClickHouse mkdir build-riscv64 -CC=clang-16 CXX=clang++-16 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF +CC=clang-17 CXX=clang++-17 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF ninja -C build-riscv64 ``` diff --git a/docs/en/development/developer-instruction.md b/docs/en/development/developer-instruction.md index f51542d538c..9d6a80de904 100644 --- a/docs/en/development/developer-instruction.md +++ b/docs/en/development/developer-instruction.md @@ -23,43 +23,34 @@ Create a fork of ClickHouse repository. To do that please click on the “fork The development process consists of first committing the intended changes into your fork of ClickHouse and then creating a “pull request” for these changes to be accepted into the main repository (ClickHouse/ClickHouse). -To work with git repositories, please install `git`. - -To do that in Ubuntu you would run in the command line terminal: +To work with Git repositories, please install `git`. To do that in Ubuntu you would run in the command line terminal: sudo apt update sudo apt install git -A brief manual on using Git can be found here: https://education.github.com/git-cheat-sheet-education.pdf. -For a detailed manual on Git see https://git-scm.com/book/en/v2. +A brief manual on using Git can be found [here](https://education.github.com/git-cheat-sheet-education.pdf). +For a detailed manual on Git see [here](https://git-scm.com/book/en/v2). ## Cloning a Repository to Your Development Machine {#cloning-a-repository-to-your-development-machine} Next, you need to download the source files onto your working machine. This is called “to clone a repository” because it creates a local copy of the repository on your working machine. 
-In the command line terminal run: +Run in your terminal: - git clone --shallow-submodules git@github.com:your_github_username/ClickHouse.git + git clone git@github.com:your_github_username/ClickHouse.git # replace placeholder with your GitHub user name cd ClickHouse -Or (if you'd like to use sparse checkout for submodules and avoid checking out unneeded files): +This command will create a directory `ClickHouse/` containing the source code of ClickHouse. If you specify a custom checkout directory (after the URL), it is important that this path does not contain whitespaces as it may lead to problems with the build system. - git clone git@github.com:your_github_username/ClickHouse.git - cd ClickHouse - ./contrib/update-submodules.sh +To make library dependencies available for the build, the ClickHouse repository uses Git submodules, i.e. references to external repositories. These are not checked out by default. To do so, you can either -Note: please, substitute *your_github_username* with what is appropriate! +- run `git clone` with option `--recurse-submodules`, -This command will create a directory `ClickHouse` containing the working copy of the project. +- if `git clone` did not check out submodules, run `git submodule update --init --jobs <N>` (e.g. `<N> = 12` to parallelize the checkout) to achieve the same as the previous alternative, or -It is important that the path to the working directory contains no whitespaces as it may lead to problems with running the build system. +- if `git clone` did not check out submodules and you'd like to use [sparse](https://github.blog/2020-01-17-bring-your-monorepo-down-to-size-with-sparse-checkout/) and [shallow](https://github.blog/2020-12-21-get-up-to-speed-with-partial-clone-and-shallow-clone/) submodule checkout to omit unneeded files and history in submodules to save space (ca. 5 GB instead of ca. 15 GB), run `./contrib/update-submodules.sh`. Not really recommended as it generally makes working with submodules less convenient and slower. -Please note that ClickHouse repository uses `submodules`. That is what the references to additional repositories are called (i.e. external libraries on which the project depends). It means that when cloning the repository you need to specify the `--recursive` flag as in the example above. If the repository has been cloned without submodules, to download them you need to run the following: - - git submodule init - git submodule update - -You can check the status with the command: `git submodule status`. +You can check the Git status with the command: `git submodule status`. If you get the following error message:
- any changes inside will be deleted): - - # Synchronizes submodules' remote URL with .gitmodules - git submodule sync - # Update the registered submodules with initialize not yet initialized - git submodule update --init - # Reset all changes done after HEAD - git submodule foreach git reset --hard - # Clean files from .gitignore - git submodule foreach git clean -xfd - # Repeat last 4 commands for all submodule - git submodule foreach git submodule sync - git submodule foreach git submodule update --init - git submodule foreach git submodule foreach git reset --hard - git submodule foreach git submodule foreach git clean -xfd - ## Build System {#build-system} ClickHouse uses CMake and Ninja for building. @@ -219,13 +180,21 @@ You can also run your custom-built ClickHouse binary with the config file from t ## IDE (Integrated Development Environment) {#ide-integrated-development-environment} -If you do not know which IDE to use, we recommend that you use CLion. CLion is commercial software, but it offers 30 days free trial period. It is also free of charge for students. CLion can be used both on Linux and on macOS. +**CLion (recommended)** -KDevelop and QTCreator are other great alternatives of an IDE for developing ClickHouse. KDevelop comes in as a very handy IDE although unstable. If KDevelop crashes after a while upon opening project, you should click “Stop All” button as soon as it has opened the list of project’s files. After doing so KDevelop should be fine to work with. +If you do not know which IDE to use, we recommend that you use [CLion](https://www.jetbrains.com/clion/). CLion is commercial software but it offers a 30 day free trial. It is also free of charge for students. CLion can be used on both Linux and macOS. -As simple code editors, you can use Sublime Text or Visual Studio Code, or Kate (all of which are available on Linux). +A few things to know when using CLion to develop ClickHouse: -Just in case, it is worth mentioning that CLion creates `build` path on its own, it also on its own selects `debug` for build type, for configuration it uses a version of CMake that is defined in CLion and not the one installed by you, and finally, CLion will use `make` to run build tasks instead of `ninja`. This is normal behaviour, just keep that in mind to avoid confusion. +- CLion creates a `build` path on its own and automatically selects `debug` for the build type +- It uses a version of CMake that is defined in CLion and not the one installed by you +- CLion will use `make` to run build tasks instead of `ninja` (this is normal behavior) + +**Other alternatives** + +[KDevelop](https://kdevelop.org/) and [QTCreator](https://www.qt.io/product/development-tools) are other great alternative IDEs for developing ClickHouse. While KDevelop is a great IDE, it is sometimes unstable. If KDevelop crashes when opening a project, you should click the “Stop All” button as soon as it has opened the list of project’s files. After doing so, KDevelop should be fine to work with. + +Other IDEs you can use are [Sublime Text](https://www.sublimetext.com/), [Visual Studio Code](https://code.visualstudio.com/), or [Kate](https://kate-editor.org/) (all of which are available on Linux). If you are using VS Code, we recommend using the [clangd extension](https://marketplace.visualstudio.com/items?itemName=llvm-vs-code-extensions.vscode-clangd) to replace IntelliSense as it is much more performant. 
## Writing Code {#writing-code} diff --git a/docs/en/development/style.md b/docs/en/development/style.md index 5b03468623d..0b71a669638 100644 --- a/docs/en/development/style.md +++ b/docs/en/development/style.md @@ -345,7 +345,7 @@ struct ExtractDomain **7.** For abstract classes (interfaces) you can add the `I` prefix. ``` cpp -class IBlockInputStream +class IProcessor ``` **8.** If you use a variable locally, you can use the short name. diff --git a/docs/en/engines/database-engines/materialized-mysql.md b/docs/en/engines/database-engines/materialized-mysql.md index b7e567c7b6c..f32698f84f6 100644 --- a/docs/en/engines/database-engines/materialized-mysql.md +++ b/docs/en/engines/database-engines/materialized-mysql.md @@ -7,7 +7,10 @@ sidebar_position: 70 # [experimental] MaterializedMySQL :::note -This is an experimental feature that should not be used in production. +This database engine is experimental. To use it, set `allow_experimental_database_materialized_mysql` to 1 in your configuration files or by using the `SET` command: +```sql +SET allow_experimental_database_materialized_mysql=1 +``` ::: Creates a ClickHouse database with all the tables existing in MySQL, and all the data in those tables. The ClickHouse server works as MySQL replica. It reads `binlog` and performs DDL and DML queries. diff --git a/docs/en/engines/database-engines/materialized-postgresql.md b/docs/en/engines/database-engines/materialized-postgresql.md index 4e978947e36..3aa6dd01ea3 100644 --- a/docs/en/engines/database-engines/materialized-postgresql.md +++ b/docs/en/engines/database-engines/materialized-postgresql.md @@ -8,7 +8,7 @@ sidebar_position: 60 Creates a ClickHouse database with tables from PostgreSQL database. Firstly, database with engine `MaterializedPostgreSQL` creates a snapshot of PostgreSQL database and loads required tables. Required tables can include any subset of tables from any subset of schemas from specified database. Along with the snapshot database engine acquires LSN and once initial dump of tables is performed - it starts pulling updates from WAL. After database is created, newly added tables to PostgreSQL database are not automatically added to replication. They have to be added manually with `ATTACH TABLE db.table` query. -Replication is implemented with PostgreSQL Logical Replication Protocol, which does not allow to replicate DDL, but allows to know whether replication breaking changes happened (column type changes, adding/removing columns). Such changes are detected and according tables stop receiving updates. In this case you should use `ATTACH`/ `DETACH` queries to reload table completely. If DDL does not break replication (for example, renaming a column) table will still receive updates (insertion is done by position). +Replication is implemented with PostgreSQL Logical Replication Protocol, which does not allow to replicate DDL, but allows to know whether replication breaking changes happened (column type changes, adding/removing columns). Such changes are detected and according tables stop receiving updates. In this case you should use `ATTACH`/ `DETACH PERMANENTLY` queries to reload table completely. If DDL does not break replication (for example, renaming a column) table will still receive updates (insertion is done by position). :::note This database engine is experimental. 
To use it, set `allow_experimental_database_materialized_postgresql` to 1 in your configuration files or by using the `SET` command: @@ -63,7 +63,7 @@ Before version 22.1, adding a table to replication left a non-removed temporary It is possible to remove specific tables from replication: ``` sql -DETACH TABLE postgres_database.table_to_remove; +DETACH TABLE postgres_database.table_to_remove PERMANENTLY; ``` ## PostgreSQL schema {#schema} diff --git a/docs/en/engines/table-engines/integrations/azureBlobStorage.md b/docs/en/engines/table-engines/integrations/azureBlobStorage.md index 3df08ee2ffb..c6525121667 100644 --- a/docs/en/engines/table-engines/integrations/azureBlobStorage.md +++ b/docs/en/engines/table-engines/integrations/azureBlobStorage.md @@ -47,6 +47,12 @@ SELECT * FROM test_table; └──────┴───────┘ ``` +## Virtual columns {#virtual-columns} + +- `_path` — Path to the file. Type: `LowCardinality(String)`. +- `_file` — Name of the file. Type: `LowCardinality(String)`. +- `_size` — Size of the file in bytes. Type: `Nullable(UInt64)`. If the size is unknown, the value is `NULL`. + ## See also [Azure Blob Storage Table Function](/docs/en/sql-reference/table-functions/azureBlobStorage) diff --git a/docs/en/engines/table-engines/integrations/embedded-rocksdb.md b/docs/en/engines/table-engines/integrations/embedded-rocksdb.md index 23ab89e1983..9af857b0835 100644 --- a/docs/en/engines/table-engines/integrations/embedded-rocksdb.md +++ b/docs/en/engines/table-engines/integrations/embedded-rocksdb.md @@ -85,6 +85,10 @@ You can also change any [rocksdb options](https://github.com/facebook/rocksdb/wi ``` +By default, the trivial approximate count optimization is turned off, which might affect the performance of `count()` queries. To enable this +optimization, set `optimize_trivial_approximate_count_query = 1`. This setting also affects `system.tables` for the EmbeddedRocksDB engine; +turn on the setting to see approximate values for `total_rows` and `total_bytes`. + ## Supported operations {#supported-operations} ### Inserts diff --git a/docs/en/engines/table-engines/integrations/hdfs.md b/docs/en/engines/table-engines/integrations/hdfs.md index c677123a8d0..19221c256f9 100644 --- a/docs/en/engines/table-engines/integrations/hdfs.md +++ b/docs/en/engines/table-engines/integrations/hdfs.md @@ -230,8 +230,9 @@ libhdfs3 support HDFS namenode HA. ## Virtual Columns {#virtual-columns} -- `_path` — Path to the file. -- `_file` — Name of the file. +- `_path` — Path to the file. Type: `LowCardinality(String)`. +- `_file` — Name of the file. Type: `LowCardinality(String)`. +- `_size` — Size of the file in bytes. Type: `Nullable(UInt64)`. If the size is unknown, the value is `NULL`. ## Storage Settings {#storage-settings} diff --git a/docs/en/engines/table-engines/integrations/kafka.md b/docs/en/engines/table-engines/integrations/kafka.md index 5d04dce4c51..de1a090d491 100644 --- a/docs/en/engines/table-engines/integrations/kafka.md +++ b/docs/en/engines/table-engines/integrations/kafka.md @@ -28,7 +28,6 @@ SETTINGS kafka_topic_list = 'topic1,topic2,...', kafka_group_name = 'group_name', kafka_format = 'data_format'[,] - [kafka_row_delimiter = 'delimiter_symbol',] [kafka_schema = '',] [kafka_num_consumers = N,] [kafka_max_block_size = 0,] @@ -53,7 +52,6 @@ Required parameters: Optional parameters: -- `kafka_row_delimiter` — Delimiter character, which ends the message.
**This setting is deprecated and is no longer used, not left for compatibility reasons.** - `kafka_schema` — Parameter that must be used if the format requires a schema definition. For example, [Cap’n Proto](https://capnproto.org/) requires the path to the schema file and the name of the root `schema.capnp:Message` object. - `kafka_num_consumers` — The number of consumers per table. Specify more consumers if the throughput of one consumer is insufficient. The total number of consumers should not exceed the number of partitions in the topic, since only one consumer can be assigned per partition, and must not be greater than the number of physical cores on the server where ClickHouse is deployed. Default: `1`. - `kafka_max_block_size` — The maximum batch size (in messages) for poll. Default: [max_insert_block_size](../../../operations/settings/settings.md#setting-max_insert_block_size). @@ -64,7 +62,7 @@ Optional parameters: - `kafka_poll_max_batch_size` — Maximum amount of messages to be polled in a single Kafka poll. Default: [max_block_size](../../../operations/settings/settings.md#setting-max_block_size). - `kafka_flush_interval_ms` — Timeout for flushing data from Kafka. Default: [stream_flush_interval_ms](../../../operations/settings/settings.md#stream-flush-interval-ms). - `kafka_thread_per_consumer` — Provide independent thread for each consumer. When enabled, every consumer flushes the data independently, in parallel (otherwise — rows from several consumers squashed to form one block). Default: `0`. -- `kafka_handle_error_mode` — How to handle errors for Kafka engine. Possible values: default, stream. +- `kafka_handle_error_mode` — How to handle errors for Kafka engine. Possible values: default (the exception will be thrown if we fail to parse a message), stream (the exception message and raw message will be saved in virtual columns `_error` and `_raw_message`). - `kafka_commit_on_select` — Commit messages when select query is made. Default: `false`. - `kafka_max_rows_per_message` — The maximum number of rows written in one kafka message for row-based formats. Default : `1`. @@ -240,14 +238,21 @@ Example: ## Virtual Columns {#virtual-columns} -- `_topic` — Kafka topic. -- `_key` — Key of the message. -- `_offset` — Offset of the message. -- `_timestamp` — Timestamp of the message. -- `_timestamp_ms` — Timestamp in milliseconds of the message. -- `_partition` — Partition of Kafka topic. -- `_headers.name` — Array of message's headers keys. -- `_headers.value` — Array of message's headers values. +- `_topic` — Kafka topic. Data type: `LowCardinality(String)`. +- `_key` — Key of the message. Data type: `String`. +- `_offset` — Offset of the message. Data type: `UInt64`. +- `_timestamp` — Timestamp of the message. Data type: `Nullable(DateTime)`. +- `_timestamp_ms` — Timestamp in milliseconds of the message. Data type: `Nullable(DateTime64(3))`. +- `_partition` — Partition of Kafka topic. Data type: `UInt64`. +- `_headers.name` — Array of message's headers keys. Data type: `Array(String)`. +- `_headers.value` — Array of message's headers values. Data type: `Array(String)`. + +Additional virtual columns when `kafka_handle_error_mode='stream'`: + +- `_raw_message` - Raw message that couldn't be parsed successfully. Data type: `String`. +- `_error` - Exception message that occurred during failed parsing. Data type: `String`. + +Note: the `_raw_message` and `_error` virtual columns are filled only in case of an exception during parsing; they are always empty when the message was parsed successfully.
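+When `kafka_handle_error_mode='stream'` is used, a materialized view can route malformed messages into a separate table. Below is a minimal sketch; the broker address, topic, and table names are placeholders for illustration, not values from this page:
+
+``` sql
+CREATE TABLE kafka_events
+(
+    payload String
+) ENGINE = Kafka
+SETTINGS kafka_broker_list = 'localhost:9092',
+         kafka_topic_list = 'events',
+         kafka_group_name = 'clickhouse_consumer',
+         kafka_format = 'JSONEachRow',
+         kafka_handle_error_mode = 'stream';
+
+-- Collect only the messages that failed to parse; `_error` is empty for well-formed messages.
+CREATE MATERIALIZED VIEW kafka_parse_errors
+ENGINE = MergeTree ORDER BY (topic, offset)
+AS SELECT _topic AS topic, _offset AS offset, _raw_message AS raw_message, _error AS error
+FROM kafka_events
+WHERE length(_error) > 0;
+```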
## Data formats support {#data-formats-support} diff --git a/docs/en/engines/table-engines/integrations/materialized-postgresql.md b/docs/en/engines/table-engines/integrations/materialized-postgresql.md index 47dae2ed494..4d83ca79d5c 100644 --- a/docs/en/engines/table-engines/integrations/materialized-postgresql.md +++ b/docs/en/engines/table-engines/integrations/materialized-postgresql.md @@ -2,11 +2,20 @@ slug: /en/engines/table-engines/integrations/materialized-postgresql sidebar_position: 130 sidebar_label: MaterializedPostgreSQL -title: MaterializedPostgreSQL --- +# [experimental] MaterializedPostgreSQL + Creates ClickHouse table with an initial data dump of PostgreSQL table and starts replication process, i.e. executes background job to apply new changes as they happen on PostgreSQL table in the remote PostgreSQL database. +:::note +This table engine is experimental. To use it, set `allow_experimental_materialized_postgresql_table` to 1 in your configuration files or by using the `SET` command: +```sql +SET allow_experimental_materialized_postgresql_table=1 +``` +::: + + If more than one table is required, it is highly recommended to use the [MaterializedPostgreSQL](../../../engines/database-engines/materialized-postgresql.md) database engine instead of the table engine and use the `materialized_postgresql_tables_list` setting, which specifies the tables to be replicated (will also be possible to add database `schema`). It will be much better in terms of CPU, fewer connections and fewer replication slots inside the remote PostgreSQL database. ## Creating a Table {#creating-a-table} diff --git a/docs/en/engines/table-engines/integrations/nats.md b/docs/en/engines/table-engines/integrations/nats.md index 570b219e5fa..37a41159fab 100644 --- a/docs/en/engines/table-engines/integrations/nats.md +++ b/docs/en/engines/table-engines/integrations/nats.md @@ -25,7 +25,6 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster] nats_url = 'host:port', nats_subjects = 'subject1,subject2,...', nats_format = 'data_format'[,] - [nats_row_delimiter = 'delimiter_symbol',] [nats_schema = '',] [nats_num_consumers = N,] [nats_queue_group = 'group_name',] @@ -40,7 +39,8 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster] [nats_password = 'password',] [nats_token = 'clickhouse',] [nats_startup_connect_tries = '5'] - [nats_max_rows_per_message = 1] + [nats_max_rows_per_message = 1,] + [nats_handle_error_mode = 'default'] ``` Required parameters: @@ -51,7 +51,6 @@ Required parameters: Optional parameters: -- `nats_row_delimiter` – Delimiter character, which ends the message. **This setting is deprecated and is no longer used, not left for compatibility reasons.** - `nats_schema` – Parameter that must be used if the format requires a schema definition. For example, [Cap’n Proto](https://capnproto.org/) requires the path to the schema file and the name of the root `schema.capnp:Message` object. - `nats_num_consumers` – The number of consumers per table. Default: `1`. Specify more consumers if the throughput of one consumer is insufficient. - `nats_queue_group` – Name for queue group of NATS subscribers. Default is the table name. @@ -66,6 +65,7 @@ Optional parameters: - `nats_token` - NATS auth token. - `nats_startup_connect_tries` - Number of connect tries at startup. Default: `5`. - `nats_max_rows_per_message` — The maximum number of rows written in one NATS message for row-based formats. (default : `1`). +- `nats_handle_error_mode` — How to handle errors for NATS engine.
Possible values: default (the exception will be thrown if we fail to parse a message), stream (the exception message and raw message will be saved in virtual columns `_error` and `_raw_message`). SSL connection: @@ -163,7 +163,15 @@ If you want to change the target table by using `ALTER`, we recommend disabling ## Virtual Columns {#virtual-columns} -- `_subject` - NATS message subject. +- `_subject` - NATS message subject. Data type: `String`. + +Additional virtual columns when `nats_handle_error_mode='stream'`: + +- `_raw_message` - Raw message that couldn't be parsed successfully. Data type: `Nullable(String)`. +- `_error` - Exception message that occurred during failed parsing. Data type: `Nullable(String)`. + +Note: the `_raw_message` and `_error` virtual columns are filled only in case of an exception during parsing; they are always `NULL` when the message was parsed successfully. + ## Data formats support {#data-formats-support} diff --git a/docs/en/engines/table-engines/integrations/rabbitmq.md b/docs/en/engines/table-engines/integrations/rabbitmq.md index 4f6107764ec..53c6e089a70 100644 --- a/docs/en/engines/table-engines/integrations/rabbitmq.md +++ b/docs/en/engines/table-engines/integrations/rabbitmq.md @@ -28,7 +28,6 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster] [rabbitmq_exchange_type = 'exchange_type',] [rabbitmq_routing_key_list = 'key1,key2,...',] [rabbitmq_secure = 0,] - [rabbitmq_row_delimiter = 'delimiter_symbol',] [rabbitmq_schema = '',] [rabbitmq_num_consumers = N,] [rabbitmq_num_queues = N,] @@ -45,7 +44,8 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster] [rabbitmq_username = '',] [rabbitmq_password = '',] [rabbitmq_commit_on_select = false,] - [rabbitmq_max_rows_per_message = 1] + [rabbitmq_max_rows_per_message = 1,] + [rabbitmq_handle_error_mode = 'default'] ``` Required parameters: @@ -58,7 +58,6 @@ Optional parameters: - `rabbitmq_exchange_type` – The type of RabbitMQ exchange: `direct`, `fanout`, `topic`, `headers`, `consistent_hash`. Default: `fanout`. - `rabbitmq_routing_key_list` – A comma-separated list of routing keys. -- `rabbitmq_row_delimiter` – Delimiter character, which ends the message. **This setting is deprecated and is no longer used, not left for compatibility reasons.** - `rabbitmq_schema` – Parameter that must be used if the format requires a schema definition. For example, [Cap’n Proto](https://capnproto.org/) requires the path to the schema file and the name of the root `schema.capnp:Message` object. - `rabbitmq_num_consumers` – The number of consumers per table. Specify more consumers if the throughput of one consumer is insufficient. Default: `1` - `rabbitmq_num_queues` – Total number of queues. Increasing this number can significantly improve performance. Default: `1`. @@ -78,6 +77,7 @@ Optional parameters: - `rabbitmq_max_rows_per_message` — The maximum number of rows written in one RabbitMQ message for row-based formats. Default : `1`. - `rabbitmq_empty_queue_backoff_start` — A start backoff point to reschedule read if the rabbitmq queue is empty. - `rabbitmq_empty_queue_backoff_end` — An end backoff point to reschedule read if the rabbitmq queue is empty. +- `rabbitmq_handle_error_mode` — How to handle errors for RabbitMQ engine. Possible values: default (the exception will be thrown if we fail to parse a message), stream (the exception message and raw message will be saved in virtual columns `_error` and `_raw_message`).
@@ -184,12 +184,19 @@ Example: ## Virtual Columns {#virtual-columns} -- `_exchange_name` - RabbitMQ exchange name. -- `_channel_id` - ChannelID, on which consumer, who received the message, was declared. -- `_delivery_tag` - DeliveryTag of the received message. Scoped per channel. -- `_redelivered` - `redelivered` flag of the message. -- `_message_id` - messageID of the received message; non-empty if was set, when message was published. -- `_timestamp` - timestamp of the received message; non-empty if was set, when message was published. +- `_exchange_name` - RabbitMQ exchange name. Data type: `String`. +- `_channel_id` - ChannelID, on which consumer, who received the message, was declared. Data type: `String`. +- `_delivery_tag` - DeliveryTag of the received message. Scoped per channel. Data type: `UInt64`. +- `_redelivered` - `redelivered` flag of the message. Data type: `UInt8`. +- `_message_id` - messageID of the received message; non-empty if was set, when message was published. Data type: `String`. +- `_timestamp` - timestamp of the received message; non-empty if was set, when message was published. Data type: `UInt64`. + +Additional virtual columns when `rabbitmq_handle_error_mode='stream'`: + +- `_raw_message` - Raw message that couldn't be parsed successfully. Data type: `Nullable(String)`. +- `_error` - Exception message that occurred during failed parsing. Data type: `Nullable(String)`. + +Note: the `_raw_message` and `_error` virtual columns are filled only in case of an exception during parsing; they are always `NULL` when the message was parsed successfully. ## Data formats support {#data-formats-support} diff --git a/docs/en/engines/table-engines/integrations/s3.md b/docs/en/engines/table-engines/integrations/s3.md index 2967a15494c..3144bdd32fa 100644 --- a/docs/en/engines/table-engines/integrations/s3.md +++ b/docs/en/engines/table-engines/integrations/s3.md @@ -142,8 +142,9 @@ Code: 48. DB::Exception: Received from localhost:9000. DB::Exception: Reading fr ## Virtual columns {#virtual-columns} -- `_path` — Path to the file. -- `_file` — Name of the file. +- `_path` — Path to the file. Type: `LowCardinality(String)`. +- `_file` — Name of the file. Type: `LowCardinality(String)`. +- `_size` — Size of the file in bytes. Type: `Nullable(UInt64)`. If the size is unknown, the value is `NULL`. For more information about virtual columns see [here](../../../engines/table-engines/index.md#table_engines-virtual_columns).
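+These virtual columns can be selected like ordinary columns. Below is a hedged sketch that lists the files behind an S3-backed query; the bucket URL and format are placeholders, and the same columns are also exposed by the `s3` table function used here:
+
+``` sql
+-- Placeholder bucket URL and format, shown only to illustrate the virtual columns.
+SELECT _path, _file, _size, count() AS rows
+FROM s3('https://my-bucket.s3.amazonaws.com/data/*.csv', 'CSVWithNames')
+GROUP BY _path, _file, _size
+ORDER BY _file;
+```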
When accessing the data, ClickHouse uses the smallest subset of partitions possible. +A partition is a logical combination of records in a table by a specified criterion. You can set a partition by an arbitrary criterion, such as by month, by day, or by event type. Each partition is stored separately to simplify manipulations of this data. When accessing the data, ClickHouse uses the smallest subset of partitions possible. Partitions improve performance for queries containing a partitioning key because ClickHouse will filter for that partition before selecting the parts and granules within the partition. The partition is specified in the `PARTITION BY expr` clause when [creating a table](../../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table). The partition key can be any expression from the table columns. For example, to specify partitioning by month, use the expression `toYYYYMM(date_column)`: diff --git a/docs/en/engines/table-engines/mergetree-family/mergetree.md b/docs/en/engines/table-engines/mergetree-family/mergetree.md index b90513acbad..9cbb48ef847 100644 --- a/docs/en/engines/table-engines/mergetree-family/mergetree.md +++ b/docs/en/engines/table-engines/mergetree-family/mergetree.md @@ -6,7 +6,7 @@ sidebar_label: MergeTree # MergeTree -The `MergeTree` engine and other engines of this family (`*MergeTree`) are the most robust ClickHouse table engines. +The `MergeTree` engine and other engines of this family (`*MergeTree`) are the most commonly used and most robust ClickHouse table engines. Engines in the `MergeTree` family are designed for inserting a very large amount of data into a table. The data is quickly written to the table part by part, then rules are applied for merging the parts in the background. This method is much more efficient than continually rewriting the data in storage during insert. @@ -32,6 +32,8 @@ Main features: The [Merge](/docs/en/engines/table-engines/special/merge.md/#merge) engine does not belong to the `*MergeTree` family. ::: +If you need to update rows frequently, we recommend using the [`ReplacingMergeTree`](/docs/en/engines/table-engines/mergetree-family/replacingmergetree.md) table engine. Using `ALTER TABLE my_table UPDATE` to update rows triggers a mutation, which causes parts to be re-written and uses IO/resources. With `ReplacingMergeTree`, you can simply insert the updated rows and the old rows will be replaced according to the table sorting key. + ## Creating a Table {#table_engine-mergetree-creating-a-table} ``` sql @@ -502,8 +504,8 @@ Indexes of type `set` can be utilized by all functions. 
The other index types ar | Function (operator) / Index | primary key | minmax | ngrambf_v1 | tokenbf_v1 | bloom_filter | inverted | |------------------------------------------------------------------------------------------------------------|-------------|--------|------------|------------|--------------|----------| -| [equals (=, ==)](/docs/en/sql-reference/functions/comparison-functions.md/#function-equals) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | -| [notEquals(!=, <>)](/docs/en/sql-reference/functions/comparison-functions.md/#function-notequals) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | +| [equals (=, ==)](/docs/en/sql-reference/functions/comparison-functions.md/#equals) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | +| [notEquals(!=, <>)](/docs/en/sql-reference/functions/comparison-functions.md/#notequals) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | | [like](/docs/en/sql-reference/functions/string-search-functions.md/#function-like) | ✔ | ✔ | ✔ | ✔ | ✗ | ✔ | | [notLike](/docs/en/sql-reference/functions/string-search-functions.md/#function-notlike) | ✔ | ✔ | ✔ | ✔ | ✗ | ✔ | | [startsWith](/docs/en/sql-reference/functions/string-functions.md/#startswith) | ✔ | ✔ | ✔ | ✔ | ✗ | ✔ | @@ -511,10 +513,10 @@ Indexes of type `set` can be utilized by all functions. The other index types ar | [multiSearchAny](/docs/en/sql-reference/functions/string-search-functions.md/#function-multisearchany) | ✗ | ✗ | ✔ | ✗ | ✗ | ✔ | | [in](/docs/en/sql-reference/functions/in-functions#in-functions) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | | [notIn](/docs/en/sql-reference/functions/in-functions#in-functions) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ | -| [less (<)](/docs/en/sql-reference/functions/comparison-functions.md/#function-less) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | -| [greater (>)](/docs/en/sql-reference/functions/comparison-functions.md/#function-greater) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | -| [lessOrEquals (<=)](/docs/en/sql-reference/functions/comparison-functions.md/#function-lessorequals) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | -| [greaterOrEquals (>=)](/docs/en/sql-reference/functions/comparison-functions.md/#function-greaterorequals) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | +| [less (<)](/docs/en/sql-reference/functions/comparison-functions.md/#less) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | +| [greater (>)](/docs/en/sql-reference/functions/comparison-functions.md/#greater) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | +| [lessOrEquals (<=)](/docs/en/sql-reference/functions/comparison-functions.md/#lessorequals) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | +| [greaterOrEquals (>=)](/docs/en/sql-reference/functions/comparison-functions.md/#greaterorequals) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | | [empty](/docs/en/sql-reference/functions/array-functions#function-empty) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | | [notEmpty](/docs/en/sql-reference/functions/array-functions#function-notempty) | ✔ | ✔ | ✗ | ✗ | ✗ | ✗ | | [has](/docs/en/sql-reference/functions/array-functions#function-has) | ✗ | ✗ | ✔ | ✔ | ✔ | ✔ | @@ -866,6 +868,7 @@ Tags: - `prefer_not_to_merge` — Disables merging of data parts on this volume. When this setting is enabled, merging data on this volume is not allowed. This allows controlling how ClickHouse works with slow disks. - `perform_ttl_move_on_insert` — Disables TTL move on data part INSERT. By default (if enabled) if we insert a data part that already expired by the TTL move rule it immediately goes to a volume/disk declared in move rule. This can significantly slowdown insert in case if destination volume/disk is slow (e.g. S3). If disabled then already expired data part is written into a default volume and then right after moved to TTL volume. 
- `load_balancing` - Policy for disk balancing, `round_robin` or `least_used`. +- `least_used_ttl_ms` - Configure timeout (in milliseconds) for updating the available space on all disks (`0` - update always, `-1` - never update, default is `60000`). Note: if the disk can be used by ClickHouse only and is not subject to an online filesystem resize/shrink, you can use `-1`; in all other cases it is not recommended, since eventually it will lead to incorrect space distribution. Configuration examples: diff --git a/docs/en/engines/table-engines/special/distributed.md b/docs/en/engines/table-engines/special/distributed.md index d1a0b13b363..14431c4c43b 100644 --- a/docs/en/engines/table-engines/special/distributed.md +++ b/docs/en/engines/table-engines/special/distributed.md @@ -46,63 +46,68 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster] AS [db2.]name2 `sharding_key` - (optionally) sharding key +Specifying the `sharding_key` is necessary for the following: + +- For `INSERTs` into a distributed table (as the table engine needs the `sharding_key` to determine how to split the data). However, if the `insert_distributed_one_random_shard` setting is enabled, then `INSERTs` do not need the sharding key +- For use with `optimize_skip_unused_shards` as the `sharding_key` is necessary to determine what shards should be queried + #### policy_name -`policy_name` - (optionally) policy name, it will be used to store temporary files for async send +`policy_name` - (optionally) policy name, it will be used to store temporary files for background send **See Also** - - [insert_distributed_sync](../../../operations/settings/settings.md#insert_distributed_sync) setting + - [distributed_foreground_insert](../../../operations/settings/settings.md#distributed_foreground_insert) setting - [MergeTree](../../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-multiple-volumes) for the examples ### Distributed Settings #### fsync_after_insert -`fsync_after_insert` - do the `fsync` for the file data after asynchronous insert to Distributed. Guarantees that the OS flushed the whole inserted data to a file **on the initiator node** disk. +`fsync_after_insert` - do the `fsync` for the file data after background insert to Distributed. Guarantees that the OS flushed the whole inserted data to a file **on the initiator node** disk. #### fsync_directories -`fsync_directories` - do the `fsync` for directories. Guarantees that the OS refreshed directory metadata after operations related to asynchronous inserts on Distributed table (after insert, after sending the data to shard, etc). +`fsync_directories` - do the `fsync` for directories. Guarantees that the OS refreshed directory metadata after operations related to background inserts on Distributed table (after insert, after sending the data to shard, etc). #### bytes_to_throw_insert -`bytes_to_throw_insert` - if more than this number of compressed bytes will be pending for async INSERT, an exception will be thrown. 0 - do not throw. Default 0. +`bytes_to_throw_insert` - if more than this number of compressed bytes will be pending for background INSERT, an exception will be thrown. 0 - do not throw. Default 0. #### bytes_to_delay_insert -`bytes_to_delay_insert` - if more than this number of compressed bytes will be pending for async INSERT, the query will be delayed. 0 - do not delay. Default 0. +`bytes_to_delay_insert` - if more than this number of compressed bytes will be pending for background INSERT, the query will be delayed.
0 - do not delay. Default 0. #### max_delay_to_insert -`max_delay_to_insert` - max delay of inserting data into Distributed table in seconds, if there are a lot of pending bytes for async send. Default 60. +`max_delay_to_insert` - max delay of inserting data into Distributed table in seconds, if there are a lot of pending bytes for background send. Default 60. -#### monitor_batch_inserts +#### background_insert_batch -`monitor_batch_inserts` - same as [distributed_directory_monitor_batch_inserts](../../../operations/settings/settings.md#distributed_directory_monitor_batch_inserts) +`background_insert_batch` - same as [distributed_background_insert_batch](../../../operations/settings/settings.md#distributed_background_insert_batch) -#### monitor_split_batch_on_failure +#### background_insert_split_batch_on_failure -`monitor_split_batch_on_failure` - same as [distributed_directory_monitor_split_batch_on_failure](../../../operations/settings/settings.md#distributed_directory_monitor_split_batch_on_failure) +`background_insert_split_batch_on_failure` - same as [distributed_background_insert_split_batch_on_failure](../../../operations/settings/settings.md#distributed_background_insert_split_batch_on_failure) -#### monitor_sleep_time_ms +#### background_insert_sleep_time_ms -`monitor_sleep_time_ms` - same as [distributed_directory_monitor_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_sleep_time_ms) +`background_insert_sleep_time_ms` - same as [distributed_background_insert_sleep_time_ms](../../../operations/settings/settings.md#distributed_background_insert_sleep_time_ms) -#### monitor_max_sleep_time_ms +#### background_insert_max_sleep_time_ms -`monitor_max_sleep_time_ms` - same as [distributed_directory_monitor_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_max_sleep_time_ms) +`background_insert_max_sleep_time_ms` - same as [distributed_background_insert_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_background_insert_max_sleep_time_ms) :::note **Durability settings** (`fsync_...`): -- Affect only asynchronous INSERTs (i.e. `insert_distributed_sync=false`) when data first stored on the initiator node disk and later asynchronously send to shards. +- Affect only background INSERTs (i.e. `distributed_foreground_insert=false`) when data is first stored on the initiator node disk and later sent to the shards in the background. - May significantly decrease the inserts' performance - Affect writing the data stored inside Distributed table folder into the **node which accepted your insert**. If you need to have guarantees of writing data to underlying MergeTree tables - see durability settings (`...fsync...`) in `system.merge_tree_settings` For **Insert limit settings** (`..._insert`) see also: -- [insert_distributed_sync](../../../operations/settings/settings.md#insert_distributed_sync) setting +- [distributed_foreground_insert](../../../operations/settings/settings.md#distributed_foreground_insert) setting - [prefer_localhost_replica](../../../operations/settings/settings.md#settings-prefer-localhost-replica) setting - `bytes_to_throw_insert` handled before `bytes_to_delay_insert`, so you should not set it to a value less than `bytes_to_delay_insert` ::: @@ -232,7 +237,7 @@ You should be concerned about the sharding scheme in the following cases: - Queries are used that require joining data (`IN` or `JOIN`) by a specific key.
If data is sharded by this key, you can use local `IN` or `JOIN` instead of `GLOBAL IN` or `GLOBAL JOIN`, which is much more efficient. - A large number of servers is used (hundreds or more) with a large number of small queries, for example, queries for data of individual clients (e.g. websites, advertisers, or partners). In order for the small queries to not affect the entire cluster, it makes sense to locate data for a single client on a single shard. Alternatively, you can set up bi-level sharding: divide the entire cluster into “layers”, where a layer may consist of multiple shards. Data for a single client is located on a single layer, but shards can be added to a layer as necessary, and data is randomly distributed within them. `Distributed` tables are created for each layer, and a single shared distributed table is created for global queries. -Data is written asynchronously. When inserted in the table, the data block is just written to the local file system. The data is sent to the remote servers in the background as soon as possible. The periodicity for sending data is managed by the [distributed_directory_monitor_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_sleep_time_ms) and [distributed_directory_monitor_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_max_sleep_time_ms) settings. The `Distributed` engine sends each file with inserted data separately, but you can enable batch sending of files with the [distributed_directory_monitor_batch_inserts](../../../operations/settings/settings.md#distributed_directory_monitor_batch_inserts) setting. This setting improves cluster performance by better utilizing local server and network resources. You should check whether data is sent successfully by checking the list of files (data waiting to be sent) in the table directory: `/var/lib/clickhouse/data/database/table/`. The number of threads performing background tasks can be set by [background_distributed_schedule_pool_size](../../../operations/settings/settings.md#background_distributed_schedule_pool_size) setting. +Data is written in the background. When inserted in the table, the data block is just written to the local file system. The data is sent to the remote servers in the background as soon as possible. The periodicity for sending data is managed by the [distributed_background_insert_sleep_time_ms](../../../operations/settings/settings.md#distributed_background_insert_sleep_time_ms) and [distributed_background_insert_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_background_insert_max_sleep_time_ms) settings. The `Distributed` engine sends each file with inserted data separately, but you can enable batch sending of files with the [distributed_background_insert_batch](../../../operations/settings/settings.md#distributed_background_insert_batch) setting. This setting improves cluster performance by better utilizing local server and network resources. You should check whether data is sent successfully by checking the list of files (data waiting to be sent) in the table directory: `/var/lib/clickhouse/data/database/table/`. The number of threads performing background tasks can be set by [background_distributed_schedule_pool_size](../../../operations/settings/settings.md#background_distributed_schedule_pool_size) setting.
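+As a sketch of how this looks in practice (assuming an existing cluster named `my_cluster` and a local table `hits_local`, both placeholder names), you can create a `Distributed` table with a sharding key and then inspect what is still queued on the initiator for background sending via the `system.distribution_queue` system table:
+
+``` sql
+-- 'my_cluster' and 'hits_local' are placeholder names for an existing cluster and local table.
+CREATE TABLE hits_distributed AS hits_local
+ENGINE = Distributed(my_cluster, default, hits_local, rand());
+
+INSERT INTO hits_distributed SELECT * FROM hits_local;
+
+-- Data still waiting to be sent to the remote shards in the background.
+SELECT database, table, data_files, data_compressed_bytes, last_exception
+FROM system.distribution_queue;
+```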
If the server ceased to exist or had a rough restart (for example, due to a hardware failure) after an `INSERT` to a `Distributed` table, the inserted data might be lost. If a damaged data part is detected in the table directory, it is transferred to the `broken` subdirectory and no longer used. diff --git a/docs/en/engines/table-engines/special/file.md b/docs/en/engines/table-engines/special/file.md index 27945b30c03..6e3897398a5 100644 --- a/docs/en/engines/table-engines/special/file.md +++ b/docs/en/engines/table-engines/special/file.md @@ -87,12 +87,18 @@ $ echo -e "1,2\n3,4" | clickhouse-local -q "CREATE TABLE table (a Int64, b Int64 - Indices - Replication -## PARTITION BY +## PARTITION BY {#partition-by} `PARTITION BY` — Optional. It is possible to create separate files by partitioning the data on a partition key. In most cases, you don't need a partition key, and if it is needed you generally don't need a partition key more granular than by month. Partitioning does not speed up queries (in contrast to the ORDER BY expression). You should never use too granular partitioning. Don't partition your data by client identifiers or names (instead, make client identifier or name the first column in the ORDER BY expression). For partitioning by month, use the `toYYYYMM(date_column)` expression, where `date_column` is a column with a date of the type [Date](/docs/en/sql-reference/data-types/date.md). The partition names here have the `"YYYYMM"` format. +## Virtual Columns {#virtual-columns} + +- `_path` — Path to the file. Type: `LowCardinality(String)`. +- `_file` — Name of the file. Type: `LowCardinality(String)`. +- `_size` — Size of the file in bytes. Type: `Nullable(UInt64)`. If the size is unknown, the value is `NULL`. + ## Settings {#settings} - [engine_file_empty_if_not_exists](/docs/en/operations/settings/settings.md#engine-file-emptyif-not-exists) - allows to select empty data from a file that doesn't exist. Disabled by default. diff --git a/docs/en/engines/table-engines/special/filelog.md b/docs/en/engines/table-engines/special/filelog.md new file mode 100644 index 00000000000..eef9a17444e --- /dev/null +++ b/docs/en/engines/table-engines/special/filelog.md @@ -0,0 +1,105 @@ +--- +slug: /en/engines/table-engines/special/filelog +sidebar_position: 160 +sidebar_label: FileLog +--- + +# FileLog Engine {#filelog-engine} + +This engine allows processing application log files as a stream of records. + +`FileLog` lets you: + +- Subscribe to log files. +- Process new records as they are appended to subscribed log files. + +## Creating a Table {#creating-a-table} + +``` sql +CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster] +( + name1 [type1] [DEFAULT|MATERIALIZED|ALIAS expr1], + name2 [type2] [DEFAULT|MATERIALIZED|ALIAS expr2], + ... +) ENGINE = FileLog('path_to_logs', 'format_name') SETTINGS + [poll_timeout_ms = 0,] + [poll_max_batch_size = 0,] + [max_block_size = 0,] + [max_threads = 0,] + [poll_directory_watch_events_backoff_init = 500,] + [poll_directory_watch_events_backoff_max = 32000,] + [poll_directory_watch_events_backoff_factor = 2,] + [handle_error_mode = 'default'] +``` + +Engine arguments: + +- `path_to_logs` – Path to log files to subscribe. It can be a path to a directory with log files or to a single log file. Note that ClickHouse allows only paths inside the `user_files` directory. +- `format_name` - Record format. Note that FileLog processes each line in a file as a separate record, and not all data formats are suitable for it.
+ +Optional parameters: + +- `poll_timeout_ms` - Timeout for single poll from log file. Default: [stream_poll_timeout_ms](../../../operations/settings/settings.md#stream_poll_timeout_ms). +- `poll_max_batch_size` — Maximum number of records to be polled in a single poll. Default: [max_block_size](../../../operations/settings/settings.md#setting-max_block_size). +- `max_block_size` — The maximum batch size (in records) for poll. Default: [max_insert_block_size](../../../operations/settings/settings.md#setting-max_insert_block_size). +- `max_threads` - Maximum number of threads to parse files, default is 0, which means the number will be max(1, physical_cpu_cores / 4). +- `poll_directory_watch_events_backoff_init` - The initial sleep value for watch directory thread. Default: `500`. +- `poll_directory_watch_events_backoff_max` - The max sleep value for watch directory thread. Default: `32000`. +- `poll_directory_watch_events_backoff_factor` - The speed of backoff, exponential by default. Default: `2`. +- `handle_error_mode` — How to handle errors for FileLog engine. Possible values: default (the exception will be thrown if we fail to parse a message), stream (the exception message and raw record will be saved in the virtual columns `_error` and `_raw_record`). + +## Description {#description} + +The delivered records are tracked automatically, so each record in a log file is only counted once. + +`SELECT` is not particularly useful for reading records (except for debugging), because each record can be read only once. It is more practical to create real-time threads using [materialized views](../../../sql-reference/statements/create/view.md). To do this: + +1. Use the engine to create a FileLog table and consider it a data stream. +2. Create a table with the desired structure. +3. Create a materialized view that converts data from the engine and puts it into a previously created table. + +When the `MATERIALIZED VIEW` joins the engine, it starts collecting data in the background. This allows you to continually receive records from log files and convert them to the required format using `SELECT`. +One FileLog table can have as many materialized views as you like. They do not read data from the table directly, but receive new records (in blocks); this way you can write to several tables with different detail levels (with grouping - aggregation and without). + +Example: + +``` sql + CREATE TABLE logs ( + timestamp UInt64, + level String, + message String + ) ENGINE = FileLog('user_files/my_app/app.log', 'JSONEachRow'); + + CREATE TABLE daily ( + day Date, + level String, + total UInt64 + ) ENGINE = SummingMergeTree(day, (day, level), 8192); + + CREATE MATERIALIZED VIEW consumer TO daily + AS SELECT toDate(toDateTime(timestamp)) AS day, level, count() as total + FROM logs GROUP BY day, level; + + SELECT level, sum(total) FROM daily GROUP BY level; +``` + +To stop receiving stream data or to change the conversion logic, detach the materialized view: + +``` sql + DETACH TABLE consumer; + ATTACH TABLE consumer; +``` + +If you want to change the target table by using `ALTER`, we recommend disabling the materialized view to avoid discrepancies between the target table and the data from the view. + +## Virtual Columns {#virtual-columns} + +- `_filename` - Name of the log file. Data type: `LowCardinality(String)`. +- `_offset` - Offset in the log file. Data type: `UInt64`. + +Additional virtual columns when `handle_error_mode='stream'`: + +- `_raw_record` - Raw record that couldn't be parsed successfully.
Data type: `Nullable(String)`. +- `_error` - Exception message happened during failed parsing. Data type: `Nullable(String)`. + +Note: `_raw_record` and `_error` virtual columns are filled only in case of exception during parsing, they are always `NULL` when message was parsed successfully. diff --git a/docs/en/engines/table-engines/special/url.md b/docs/en/engines/table-engines/special/url.md index 5a5e1564180..f6183a779ae 100644 --- a/docs/en/engines/table-engines/special/url.md +++ b/docs/en/engines/table-engines/special/url.md @@ -103,6 +103,12 @@ SELECT * FROM url_engine_table For partitioning by month, use the `toYYYYMM(date_column)` expression, where `date_column` is a column with a date of the type [Date](/docs/en/sql-reference/data-types/date.md). The partition names here have the `"YYYYMM"` format. +## Virtual Columns {#virtual-columns} + +- `_path` — Path to the `URL`. Type: `LowCardinalty(String)`. +- `_file` — Resource name of the `URL`. Type: `LowCardinalty(String)`. +- `_size` — Size of the resource in bytes. Type: `Nullable(UInt64)`. If the size is unknown, the value is `NULL`. + ## Storage Settings {#storage-settings} - [engine_url_skip_empty_files](/docs/en/operations/settings/settings.md#engine_url_skip_empty_files) - allows to skip empty files while reading. Disabled by default. diff --git a/docs/en/getting-started/example-datasets/amazon-reviews.md b/docs/en/getting-started/example-datasets/amazon-reviews.md index 75e4549cb78..00dc553782c 100644 --- a/docs/en/getting-started/example-datasets/amazon-reviews.md +++ b/docs/en/getting-started/example-datasets/amazon-reviews.md @@ -5,9 +5,7 @@ sidebar_label: Amazon customer reviews # Amazon customer reviews dataset -[**Amazon Customer Reviews**](https://s3.amazonaws.com/amazon-reviews-pds/readme.html) (a.k.a. Product Reviews) is one of Amazon’s iconic products. In a period of over two decades since the first review in 1995, millions of Amazon customers have contributed over a hundred million reviews to express opinions and describe their experiences regarding products on the Amazon.com website. This makes Amazon Customer Reviews a rich source of information for academic researchers in the fields of Natural Language Processing (NLP), Information Retrieval (IR), and Machine Learning (ML), amongst others. By accessing the dataset, you agree to the [license terms](https://s3.amazonaws.com/amazon-reviews-pds/license.txt). - -The data is in a tab-separated format in gzipped files are up in AWS S3. Let's walk through the steps to insert it into ClickHouse. +This dataset contains over 150M customer reviews of Amazon products. The data is in snappy-compressed Parquet files in AWS S3 that total 49GB in size (compressed). Let's walk through the steps to insert it into ClickHouse. :::note The queries below were executed on a **Production** instance of [ClickHouse Cloud](https://clickhouse.cloud). 
@@ -18,49 +16,28 @@ The queries below were executed on a **Production** instance of [ClickHouse Clou ```sql SELECT * -FROM s3('https://s3.amazonaws.com/amazon-reviews-pds/tsv/amazon_reviews_us_Wireless_v1_00.tsv.gz', - 'TabSeparatedWithNames', - 'marketplace String, - customer_id Int64, - review_id String, - product_id String, - product_parent Int64, - product_title String, - product_category String, - star_rating Int64, - helpful_votes Int64, - total_votes Int64, - vine Bool, - verified_purchase Bool, - review_headline String, - review_body String, - review_date Date' -) -LIMIT 10; +FROM s3('https://datasets-documentation.s3.eu-west-3.amazonaws.com/amazon_reviews/amazon_reviews_2015.snappy.parquet') +LIMIT 10 ``` The rows look like: ```response -┌─marketplace─┬─customer_id─┬─review_id──────┬─product_id─┬─product_parent─┬─product_title──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┬─product_category─┬─star_rating─┬─helpful_votes─┬─total_votes─┬─vine──┬─verified_purchase─┬─review_headline───────────┬─review_body────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┬─review_date─┐ -│ US │ 16414143 │ R3W4P9UBGNGH1U │ B00YL0EKWE │ 852431543 │ LG G4 Case Hard Transparent Slim Clear Cover for LG G4 │ Wireless │ 2 │ 1 │ 3 │ false │ true │ Looks good, functions meh │ 2 issues - Once I turned on the circle apps and installed this case, my battery drained twice as fast as usual. I ended up turning off the circle apps, which kind of makes the case just a case... with a hole in it. Second, the wireless charging doesn't work. I have a Motorola 360 watch and a Qi charging pad. The watch charges fine but this case doesn't. But hey, it looks nice. │ 2015-08-31 │ -│ US │ 50800750 │ R15V54KBMTQWAY │ B00XK95RPQ │ 516894650 │ Selfie Stick Fiblastiq™ Extendable Wireless Bluetooth Selfie Stick with built-in Bluetooth Adjustable Phone Holder │ Wireless │ 4 │ 0 │ 0 │ false │ false │ A fun little gadget │ I’m embarrassed to admit that until recently, I have had a very negative opinion about “selfie sticks” aka “monopods” aka “narcissticks.” But having reviewed a number of them recently, they’re growing on me. This one is pretty nice and simple to set up and with easy instructions illustrated on the back of the box (not sure why some reviewers have stated that there are no instructions when they are clearly printed on the box unless they received different packaging than I did). Once assembled, the pairing via bluetooth and use of the stick are easy and intuitive. Nothing to it.

The stick comes with a USB charging cable but arrived with a charge so you can use it immediately, though it’s probably a good idea to charge it right away so that you have no interruption of use out of the box. Make sure the stick is switched to on (it will light up) and extend your stick to the length you desire up to about a yard’s length and snap away.

The phone clamp held the phone sturdily so I wasn’t worried about it slipping out. But the longer you extend the stick, the harder it is to maneuver. But that will happen with any stick and is not specific to this one in particular.

Two things that could improve this: 1) add the option to clamp this in portrait orientation instead of having to try and hold the stick at the portrait angle, which makes it feel unstable; 2) add the opening for a tripod so that this can be used to sit upright on a table for skyping and facetime eliminating the need to hold the phone up with your hand, causing fatigue.

But other than that, this is a nice quality monopod for a variety of picture taking opportunities.

I received a sample in exchange for my honest opinion. │ 2015-08-31 │ -│ US │ 15184378 │ RY8I449HNXSVF │ B00SXRXUKO │ 984297154 │ Tribe AB40 Water Resistant Sports Armband with Key Holder for 4.7-Inch iPhone 6S/6/5/5S/5C, Galaxy S4 + Screen Protector - Dark Pink │ Wireless │ 5 │ 0 │ 0 │ false │ true │ Five Stars │ Fits iPhone 6 well │ 2015-08-31 │ -│ US │ 10203548 │ R18TLJYCKJFLSR │ B009V5X1CE │ 279912704 │ RAVPower® Element 10400mAh External Battery USB Portable Charger (Dual USB Outputs, Ultra Compact Design), Travel Charger for iPhone 6,iPhone 6 plus,iPhone 5, 5S, 5C, 4S, 4, iPad Air, 4, 3, 2, Mini 2 (Apple adapters not included); Samsung Galaxy S5, S4, S3, S2, Note 3, Note 2; HTC One, EVO, Thunderbolt, Incredible, Droid DNA, Motorola ATRIX, Droid, Moto X, Google Glass, Nexus 4, Nexus 5, Nexus 7, │ Wireless │ 5 │ 0 │ 0 │ false │ true │ Great charger │ Great charger. I easily get 3+ charges on a Samsung Galaxy 3. Works perfectly for camping trips or long days on the boat. │ 2015-08-31 │ -│ US │ 488280 │ R1NK26SWS53B8Q │ B00D93OVF0 │ 662791300 │ Fosmon Micro USB Value Pack Bundle for Samsung Galaxy Exhilarate - Includes Home / Travel Charger, Car / Vehicle Charger and USB Cable │ Wireless │ 5 │ 0 │ 0 │ false │ true │ Five Stars │ Great for the price :-) │ 2015-08-31 │ -│ US │ 13334021 │ R11LOHEDYJALTN │ B00XVGJMDQ │ 421688488 │ iPhone 6 Case, Vofolen Impact Resistant Protective Shell iPhone 6S Wallet Cover Shockproof Rubber Bumper Case Anti-scratches Hard Cover Skin Card Slot Holder for iPhone 6 6S │ Wireless │ 5 │ 0 │ 0 │ false │ true │ Five Stars │ Great Case, better customer service! │ 2015-08-31 │ -│ US │ 27520697 │ R3ALQVQB2P9LA7 │ B00KQW1X1C │ 554285554 │ Nokia Lumia 630 RM-978 White Factory Unlocked - International Version No Warranty │ Wireless │ 4 │ 0 │ 0 │ false │ true │ Four Stars │ Easy to set up and use. Great functions for the price │ 2015-08-31 │ -│ US │ 48086021 │ R3MWLXLNO21PDQ │ B00IP1MQNK │ 488006702 │ Lumsing 10400mah external battery │ Wireless │ 5 │ 0 │ 0 │ false │ true │ Five Stars │ Works great │ 2015-08-31 │ -│ US │ 12738196 │ R2L15IS24CX0LI │ B00HVORET8 │ 389677711 │ iPhone 5S Battery Case - iPhone 5 Battery Case , Maxboost Atomic S [MFI Certified] External Protective Battery Charging Case Power Bank Charger All Versions of Apple iPhone 5/5S [Juice Battery Pack] │ Wireless │ 5 │ 0 │ 0 │ false │ true │ So far so good │ So far so good. It is essentially identical to the one it replaced from another company. That one stopped working after 7 months so I am a bit apprehensive about this one. │ 2015-08-31 │ -│ US │ 15867807 │ R1DJ8976WPWVZU │ B00HX3G6J6 │ 299654876 │ HTC One M8 Screen Protector, Skinomi TechSkin Full Coverage Screen Protector for HTC One M8 Clear HD Anti-Bubble Film │ Wireless │ 3 │ 0 │ 0 │ false │ true │ seems durable but these are always harder to get on ... │ seems durable but these are always harder to get on right than people make them out to be. also send to curl up at the edges after a while. with today's smartphones, you hardly need screen protectors anyway. 
│ 2015-08-31 │ -└─────────────┴─────────────┴────────────────┴────────────┴────────────────┴───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┴──────────────────┴─────────────┴───────────────┴─────────────┴───────┴───────────────────┴─────────────────────────────────────────────────────────┴─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┴─────────────┘ +┌─review_date─┬─marketplace─┬─customer_id─┬─review_id──────┬─product_id─┬─product_parent─┬─product_title────────────────────────────────────────────────┬─product_category───────┬─star_rating─┬─helpful_votes─┬─total_votes─┬─vine──┬─verified_purchase─┬─review_headline─────────────────────────────────────────────────────────────┬─review_body────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐ +│ 16452 │ US │ 21080196 │ R17NMVYCQXEEFW │ B00RSI5DJA │ 904397429 │ Pilot │ Digital_Video_Download │ 5 │ 0 │ 0 │ false │ false │ yes indeed │ OMG- i totally see myself get hook on that show if it happen- love it │ +│ 16452 │ US │ 44158214 │ R3MAPJVO9D0ERG │ B00RSI61PU │ 475013967 │ Salem Rogers: Model of the Year 1998 │ Digital_Video_Download │ 5 │ 0 │ 0 │ false │ false │ Halarious show!! │ Loved this pilot episode!! Please pick this up Amazon!!! │ +│ 16452 │ US │ 1944630 │ R1Q5YPRE84OVB6 │ B009IU6BIS │ 101502671 │ National Lampoon's Christmas Vacation │ Digital_Video_Download │ 5 │ 0 │ 0 │ false │ false │ Classic! │ This is a holiday classic. How can you not love it! │ +│ 16452 │ US │ 49029010 │ RGDK35TBJJ2ZI │ B00RSI68V2 │ 639602030 │ Table 58 │ Digital_Video_Download │ 5 │ 2 │ 3 │ false │ false │ Fun for the whole family!! │ This show is fun! Our family really enjoyed watching the show. I can see this being one of the shows that we watch on Friday nights with our pizza and ice cream. I hope to see more of the show and the great cast of characters. │ +│ 16452 │ US │ 52257958 │ R1R2SEOJT8M14Y │ B00RSGIMUE │ 196368495 │ Niko and the Sword of Light │ Digital_Video_Download │ 5 │ 1 │ 2 │ false │ false │ it's a new kind of avatar. great show. make more. │ My 7 year old son and my husband both loved it! It's like avatar the last air bender but with different magical powers. The characters are adorably well developed. The story is interesting. We hope amazon makes the whole season. We can't wait to see more! │ +│ 16452 │ US │ 26927978 │ RN0JCPQ6Z4FUB │ B009ITL7UG │ 497741324 │ Lord of the Rings: The Return of the King (Extended Edition) │ Digital_Video_Download │ 5 │ 0 │ 0 │ false │ true │ Definite must-own for any Tolkien buff who has not yet upgraded to Blu-Ray! │ If you liked the theatrical release and are a fan of Middle-Earth then you should get this. │ +│ 16452 │ US │ 19626887 │ R15LDVOU1S1DFB │ B00RSGHGB0 │ 576999592 │ Just Add Magic - Season 1 │ Digital_Video_Download │ 5 │ 1 │ 1 │ false │ false │ Great story! So good even my teenage boys said ... │ Great story! So good even my teenage boys said this is actually pretty good!!! Can't wait for the next episode. 
│ +│ 16452 │ US │ 1439383 │ R2DJVLZM1MVFQH │ B002WEQJ3E │ 733651019 │ Six: The Mark Unleashed │ Digital_Video_Download │ 1 │ 0 │ 4 │ false │ false │ I am now less intelligent for having watched an entire 10 minutes of it │ I am now less intelligent for having watched an entire 10 minutes of it. God save my sole as I now must kick out the chair from which I am standing on so that the noose may do its job. Watch the movie at your own risk. The screen will suck your brain cells out of your body. │ +│ 16452 │ US │ 46233181 │ R33W2NB9MCRUFV │ B00RSGFYQE │ 464741068 │ Point of Honor │ Digital_Video_Download │ 4 │ 0 │ 0 │ false │ false │ Give it a chance. │ Pilots are just what they are...pilots. A chance to see what works and what doesn't and a chance to smooth out the wrinkles. Point of Honor at least stands a fair chance. │ +│ 16452 │ US │ 19537300 │ R2WGJYESHID0ZF │ B00RSGHQJM │ 374287214 │ Down Dog │ Digital_Video_Download │ 5 │ 1 │ 1 │ false │ false │ Five Stars │ great fun │ +└─────────────┴─────────────┴─────────────┴────────────────┴────────────┴────────────────┴──────────────────────────────────────────────────────────────┴────────────────────────┴─────────────┴───────────────┴─────────────┴───────┴───────────────────┴─────────────────────────────────────────────────────────────────────────────┴────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ ``` -:::note -Normally you would not need to pass in the schema into the `s3` table function - ClickHouse can infer the names and data types of the columns. However, this particular dataset uses a non-standard tab-separated format, but the `s3` function seems to work fine with this non-standard format if you include the schema. -::: - -2. Let's define a new table named `amazon_reviews`. We'll optimize some of the column data types - and choose a primary key (the `ORDER BY` clause): +2. Let's define a new `MergeTree` table named `amazon_reviews` to store this data in ClickHouse: ```sql CREATE TABLE amazon_reviews @@ -82,58 +59,38 @@ CREATE TABLE amazon_reviews review_body String ) ENGINE = MergeTree -ORDER BY (marketplace, review_date, product_category); +ORDER BY (review_date, product_category); ``` -3. We are now ready to insert the data into ClickHouse. Before we do, check out the [list of files in the dataset](https://s3.amazonaws.com/amazon-reviews-pds/tsv/index.txt) and decide which ones you want to include. - -4. We will insert all of the US reviews - which is about 151M rows. The following `INSERT` command uses the `s3Cluster` table function, which allows the processing of multiple S3 files in parallel using all the nodes of your cluster. We also use a wildcard to insert any file that starts with the name `https://s3.amazonaws.com/amazon-reviews-pds/tsv/amazon_reviews_us_`: +3. The following `INSERT` command uses the `s3Cluster` table function, which allows the processing of multiple S3 files in parallel using all the nodes of your cluster. 
We also use a wildcard to insert any file that starts with the name `https://datasets-documentation.s3.eu-west-3.amazonaws.com/amazon_reviews/amazon_reviews_*.snappy.parquet`: ```sql INSERT INTO amazon_reviews -WITH - transform(vine, ['Y','N'],[true, false]) AS vine, - transform(verified_purchase, ['Y','N'],[true, false]) AS verified_purchase SELECT * FROM s3Cluster( 'default', - 'https://s3.amazonaws.com/amazon-reviews-pds/tsv/amazon_reviews_us_*.tsv.gz', - 'TSVWithNames', - 'review_date Date, - marketplace LowCardinality(String), - customer_id UInt64, - review_id String, - product_id String, - product_parent UInt64, - product_title String, - product_category LowCardinality(String), - star_rating UInt8, - helpful_votes UInt32, - total_votes UInt32, - vine FixedString(1), - verified_purchase FixedString(1), - review_headline String, - review_body String' - ) -SETTINGS input_format_allow_errors_num = 1000000; + 'https://datasets-documentation.s3.eu-west-3.amazonaws.com/amazon_reviews/amazon_reviews_*.snappy.parquet' + ); ``` :::tip -In ClickHouse Cloud, there is a cluster named `default`. Change `default` to the name of your cluster...or use the `s3` table function (instead of `s3Cluster`) if you do not have a cluster. +In ClickHouse Cloud, the name of the cluster is `default`. Change `default` to the name of your cluster...or use the `s3` table function (instead of `s3Cluster`) if you do not have a cluster. ::: -5. That query doesn't take long - within 5 minutes or so you should see all the rows inserted: +5. That query doesn't take long - averaging about 300,000 rows per second. within 5 minutes or so you should see all the rows inserted: ```sql SELECT formatReadableQuantity(count()) -FROM amazon_reviews +FROM amazon_reviews; ``` ```response ┌─formatReadableQuantity(count())─┐ │ 150.96 million │ └─────────────────────────────────┘ + +1 row in set. Elapsed: 0.005 sec. ``` 6. Let's see how much space our data is using: @@ -155,11 +112,11 @@ The original data was about 70G, but compressed in ClickHouse it takes up about ```response ┌─disk_name─┬─compressed─┬─uncompressed─┬─compr_rate─┬──────rows─┬─part_count─┐ -│ s3disk │ 30.00 GiB │ 70.61 GiB │ 2.35 │ 150957260 │ 9 │ +│ s3disk │ 30.05 GiB │ 70.47 GiB │ 2.35 │ 150957260 │ 14 │ └───────────┴────────────┴──────────────┴────────────┴───────────┴────────────┘ ``` -7. Let's run some queries...here are the top 10 most-helpful reviews on Amazon: +7. Let's run some queries...here are the top 10 most-helpful reviews in the dataset: ```sql SELECT @@ -170,7 +127,7 @@ ORDER BY helpful_votes DESC LIMIT 10; ``` -Notice the query has to process all 151M rows, and it takes about 17 seconds: +Notice the query has to process all 151M rows, but takes less than one second! ```response ┌─product_title────────────────────────────────────────────────────────────────────────────┬─review_headline───────────────────────────────────────────────────────┐ @@ -186,7 +143,7 @@ Notice the query has to process all 151M rows, and it takes about 17 seconds: │ Tuscan Dairy Whole Vitamin D Milk, Gallon, 128 oz │ Make this your only stock and store │ └──────────────────────────────────────────────────────────────────────────────────────────┴───────────────────────────────────────────────────────────────────────┘ -10 rows in set. Elapsed: 17.595 sec. Processed 150.96 million rows, 15.36 GB (8.58 million rows/s., 872.89 MB/s.) +10 rows in set. Elapsed: 0.897 sec. Processed 150.96 million rows, 15.36 GB (168.36 million rows/s., 17.13 GB/s.) ``` 8. 
Here are the top 10 products in Amazon with the most reviews: @@ -215,7 +172,7 @@ LIMIT 10; │ Crossy Road │ 28111 │ └───────────────────────────────────────────────┴─────────┘ -10 rows in set. Elapsed: 16.684 sec. Processed 195.05 million rows, 20.86 GB (11.69 million rows/s., 1.25 GB/s.) +10 rows in set. Elapsed: 20.059 sec. Processed 150.96 million rows, 12.78 GB (7.53 million rows/s., 637.25 MB/s.) ``` 9. Here are the average review ratings per month for each product (an actual [Amazon job interview question](https://datalemur.com/questions/sql-avg-review-ratings)!): @@ -261,7 +218,8 @@ It calculates all the monthly averages for each product, but we only returned 20 │ 2015-08-01 │ The Birds of West Africa (Collins Field Guides) │ 4 │ └────────────┴─────────────────────────────────────────────────────────────────────────────────────────┴───────────┘ -20 rows in set. Elapsed: 52.827 sec. Processed 251.46 million rows, 35.26 GB (4.76 million rows/s., 667.55 MB/s.) +20 rows in set. Elapsed: 43.055 sec. Processed 150.96 million rows, 13.24 GB (3.51 million rows/s., 307.41 MB/s.) +Peak memory usage: 41.73 GiB. ``` 10. Here are the total number of votes per product category. This query is fast because `product_category` is in the primary key: @@ -272,7 +230,8 @@ SELECT product_category FROM amazon_reviews GROUP BY product_category -ORDER BY 1 DESC; +ORDER BY 1 DESC +FORMAT PrettyCompactMonoBlock; ``` ```response @@ -322,7 +281,7 @@ ORDER BY 1 DESC; │ 72970 │ Gift Card │ └──────────────────┴──────────────────────────┘ -43 rows in set. Elapsed: 0.423 sec. Processed 150.96 million rows, 756.20 MB (356.70 million rows/s., 1.79 GB/s.) +43 rows in set. Elapsed: 0.201 sec. Processed 150.96 million rows, 754.79 MB (750.85 million rows/s., 3.75 GB/s.) ``` 11. Let's find the products with the word **"awful"** occurring most frequently in the review. 
This is a big task - over 151M strings have to be parsed looking for a single word: @@ -340,7 +299,7 @@ ORDER BY count DESC LIMIT 50; ``` -The query takes a couple of minutes, but the results are a fun read: +The query only takes 4 seconds - which is impressive - and the results are a fun read: ```response @@ -363,41 +322,42 @@ The query takes a couple of minutes, but the results are a fun read: │ B00N28818A │ Amazon Prime Video │ 1.4305555555555556 │ 72 │ │ B007FTE2VW │ SimCity - Limited Edition │ 1.2794117647058822 │ 68 │ │ 0439023513 │ Mockingjay (The Hunger Games) │ 2.6417910447761193 │ 67 │ -│ B00178630A │ Diablo III - PC/Mac │ 1.671875 │ 64 │ │ B000OCEWGW │ Liquid Ass │ 4.8125 │ 64 │ +│ B00178630A │ Diablo III - PC/Mac │ 1.671875 │ 64 │ │ B005ZOBNOI │ The Fault in Our Stars │ 4.316666666666666 │ 60 │ │ B00L9B7IKE │ The Girl on the Train: A Novel │ 2.0677966101694913 │ 59 │ -│ B007S6Y6VS │ Garden of Life Raw Organic Meal │ 2.8793103448275863 │ 58 │ -│ B0064X7B4A │ Words With Friends │ 2.2413793103448274 │ 58 │ │ B003WUYPPG │ Unbroken: A World War II Story of Survival, Resilience, and Redemption │ 4.620689655172414 │ 58 │ -│ B00006HBUJ │ Star Wars: Episode II - Attack of the Clones (Widescreen Edition) │ 2.2982456140350878 │ 57 │ +│ B0064X7B4A │ Words With Friends │ 2.2413793103448274 │ 58 │ +│ B007S6Y6VS │ Garden of Life Raw Organic Meal │ 2.8793103448275863 │ 58 │ │ B000XUBFE2 │ The Book Thief │ 4.526315789473684 │ 57 │ +│ B00006HBUJ │ Star Wars: Episode II - Attack of the Clones (Widescreen Edition) │ 2.2982456140350878 │ 57 │ │ B0006399FS │ How to Dismantle an Atomic Bomb │ 1.9821428571428572 │ 56 │ │ B003ZSJ212 │ Star Wars: The Complete Saga (Episodes I-VI) (Packaging May Vary) [Blu-ray] │ 2.309090909090909 │ 55 │ │ 193700788X │ Dead Ever After (Sookie Stackhouse/True Blood) │ 1.5185185185185186 │ 54 │ │ B004FYEZMQ │ Mass Effect 3 │ 2.056603773584906 │ 53 │ │ B000CFYAMC │ The Room │ 3.9615384615384617 │ 52 │ -│ B0031JK95S │ Garden of Life Raw Organic Meal │ 3.3137254901960786 │ 51 │ │ B0012JY4G4 │ Color Oops Hair Color Remover Extra Strength 1 Each │ 3.9019607843137254 │ 51 │ -│ B007VTVRFA │ SimCity - Limited Edition │ 1.2040816326530612 │ 49 │ +│ B0031JK95S │ Garden of Life Raw Organic Meal │ 3.3137254901960786 │ 51 │ │ B00CE18P0K │ Pilot │ 1.7142857142857142 │ 49 │ +│ B007VTVRFA │ SimCity - Limited Edition │ 1.2040816326530612 │ 49 │ │ 0316015849 │ Twilight (The Twilight Saga, Book 1) │ 1.8979591836734695 │ 49 │ │ B00DR0PDNE │ Google Chromecast HDMI Streaming Media Player │ 2.5416666666666665 │ 48 │ │ B000056OWC │ The First Years: 4-Stage Bath System │ 1.2127659574468086 │ 47 │ │ B007IXWKUK │ Fifty Shades Darker (Fifty Shades, Book 2) │ 1.6304347826086956 │ 46 │ │ 1892112000 │ To Train Up a Child │ 1.4130434782608696 │ 46 │ │ 043935806X │ Harry Potter and the Order of the Phoenix (Book 5) │ 3.977272727272727 │ 44 │ -│ B00BGO0Q9O │ Fitbit Flex Wireless Wristband with Sleep Function, Black │ 1.9318181818181819 │ 44 │ │ B003XF1XOQ │ Mockingjay (Hunger Games Trilogy, Book 3) │ 2.772727272727273 │ 44 │ -│ B00DD2B52Y │ Spring Breakers │ 1.2093023255813953 │ 43 │ +│ B00BGO0Q9O │ Fitbit Flex Wireless Wristband with Sleep Function, Black │ 1.9318181818181819 │ 44 │ │ B0064X7FVE │ The Weather Channel: Forecast, Radar & Alerts │ 1.5116279069767442 │ 43 │ │ B0083PWAPW │ Kindle Fire HD 7", Dolby Audio, Dual-Band Wi-Fi │ 2.627906976744186 │ 43 │ +│ B00DD2B52Y │ Spring Breakers │ 1.2093023255813953 │ 43 │ │ B00192KCQ0 │ Death Magnetic │ 3.5714285714285716 │ 42 │ -│ B007S6Y74O │ Garden of Life 
Raw Organic Meal │ 3.292682926829268 │ 41 │ +│ B004CFA9RS │ Divergent (Divergent Trilogy, Book 1) │ 3.1219512195121952 │ 41 │ │ B0052QYLUM │ Infant Optics DXR-5 Portable Video Baby Monitor │ 2.1463414634146343 │ 41 │ └────────────┴──────────────────────────────────────────────────────────────────────────────────────────┴────────────────────┴───────┘ -50 rows in set. Elapsed: 60.052 sec. Processed 150.96 million rows, 68.93 GB (2.51 million rows/s., 1.15 GB/s.) +50 rows in set. Elapsed: 4.072 sec. Processed 150.96 million rows, 68.93 GB (37.07 million rows/s., 16.93 GB/s.) +Peak memory usage: 1.82 GiB. ``` 12. We can run the same query again, except this time we search for **awesome** in the reviews: @@ -415,8 +375,6 @@ ORDER BY count DESC LIMIT 50; ``` -It runs quite a bit faster - which means the cache is helping us out here: - ```response ┌─product_id─┬─any(product_title)────────────────────────────────────────────────────┬───avg(star_rating)─┬─count─┐ @@ -449,8 +407,8 @@ It runs quite a bit faster - which means the cache is helping us out here: │ B008JK6W5K │ Logo Quiz │ 4.782106782106782 │ 693 │ │ B00EDTSKLU │ Geometry Dash │ 4.942028985507246 │ 690 │ │ B00CSR2J9I │ Hill Climb Racing │ 4.880059970014993 │ 667 │ -│ B005ZXWMUS │ Netflix │ 4.722306525037936 │ 659 │ │ B00CRFAAYC │ Fab Tattoo Artist FREE │ 4.907435508345979 │ 659 │ +│ B005ZXWMUS │ Netflix │ 4.722306525037936 │ 659 │ │ B00DHQHQCE │ Battle Beach │ 4.863287250384024 │ 651 │ │ B00BGA9WK2 │ PlayStation 4 500GB Console [Old Model] │ 4.688751926040061 │ 649 │ │ B008Y7SMQU │ Logo Quiz - Fun Plus Free │ 4.7888 │ 625 │ @@ -472,5 +430,6 @@ It runs quite a bit faster - which means the cache is helping us out here: │ B00G6ZTM3Y │ Terraria │ 4.728421052631579 │ 475 │ └────────────┴───────────────────────────────────────────────────────────────────────┴────────────────────┴───────┘ -50 rows in set. Elapsed: 33.954 sec. Processed 150.96 million rows, 68.95 GB (4.45 million rows/s., 2.03 GB/s.) +50 rows in set. Elapsed: 4.079 sec. Processed 150.96 million rows, 68.95 GB (37.01 million rows/s., 16.90 GB/s.) +Peak memory usage: 2.18 GiB. 
``` diff --git a/docs/en/getting-started/example-datasets/wikistat.md b/docs/en/getting-started/example-datasets/wikistat.md index 9d0760efe94..d913ccd9b31 100644 --- a/docs/en/getting-started/example-datasets/wikistat.md +++ b/docs/en/getting-started/example-datasets/wikistat.md @@ -1,5 +1,4 @@ --- -slug: /en/getting-started/example-datasets/wikistat sidebar_label: WikiStat --- @@ -41,7 +40,8 @@ CREATE TABLE wikistat project LowCardinality(String), subproject LowCardinality(String), path String CODEC(ZSTD(3)), - hits UInt64 CODEC(ZSTD(3)) + hits UInt64 CODEC(ZSTD(3)), + size UInt64 CODEC(ZSTD(3)) ) ENGINE = MergeTree ORDER BY (path, time); diff --git a/docs/en/interfaces/formats.md b/docs/en/interfaces/formats.md index df6acd89616..57de0555bf6 100644 --- a/docs/en/interfaces/formats.md +++ b/docs/en/interfaces/formats.md @@ -74,6 +74,7 @@ The supported formats are: | [ArrowStream](#data-format-arrow-stream) | ✔ | ✔ | | [ORC](#data-format-orc) | ✔ | ✔ | | [One](#data-format-one) | ✔ | ✗ | +| [Npy](#data-format-npy) | ✔ | ✗ | | [RowBinary](#rowbinary) | ✔ | ✔ | | [RowBinaryWithNames](#rowbinarywithnamesandtypes) | ✔ | ✔ | | [RowBinaryWithNamesAndTypes](#rowbinarywithnamesandtypes) | ✔ | ✔ | @@ -2155,7 +2156,7 @@ To exchange data with Hadoop, you can use [HDFS table engine](/docs/en/engines/t - [input_format_parquet_local_file_min_bytes_for_seek](/docs/en/operations/settings/settings-formats.md/#input_format_parquet_local_file_min_bytes_for_seek) - min bytes required for local read (file) to do seek, instead of read with ignore in Parquet input format. Default value - `8192`. - [output_format_parquet_fixed_string_as_fixed_byte_array](/docs/en/operations/settings/settings-formats.md/#output_format_parquet_fixed_string_as_fixed_byte_array) - use Parquet FIXED_LENGTH_BYTE_ARRAY type instead of Binary/String for FixedString columns. Default value - `true`. - [output_format_parquet_version](/docs/en/operations/settings/settings-formats.md/#output_format_parquet_version) - The version of Parquet format used in output format. Default value - `2.latest`. -- [output_format_parquet_compression_method](/docs/en/operations/settings/settings-formats.md/#output_format_parquet_compression_method) - compression method used in output Parquet format. Default value - `snappy`. +- [output_format_parquet_compression_method](/docs/en/operations/settings/settings-formats.md/#output_format_parquet_compression_method) - compression method used in output Parquet format. Default value - `lz4`. ## ParquetMetadata {data-format-parquet-metadata} @@ -2454,6 +2455,51 @@ Result: └──────────────┘ ``` +## Npy {#data-format-npy} + +This function is designed to load a NumPy array from a .npy file into ClickHouse. The NumPy file format is a binary format used for efficiently storing arrays of numerical data. During import, ClickHouse treats top level dimension as an array of rows with single column. 
Supported Npy data types and their corresponding type in ClickHouse: +| Npy type | ClickHouse type | +|:--------:|:---------------:| +| b1 | UInt8 | +| i1 | Int8 | +| i2 | Int16 | +| i4 | Int32 | +| i8 | Int64 | +| u1 | UInt8 | +| u2 | UInt16 | +| u4 | UInt32 | +| u8 | UInt64 | +| f2 | Float32 | +| f4 | Float32 | +| f8 | Float64 | +| S | String | +| U | String | + +**Example of saving an array in .npy format using Python** + + +```Python +import numpy as np +arr = np.array([[[1],[2],[3]],[[4],[5],[6]]]) +np.save('example_array.npy', arr) +``` + +**Example of reading a NumPy file in ClickHouse** + +Query: +```sql +SELECT * +FROM file('example_array.npy', Npy) +``` + +Result: +``` +┌─array─────────┐ +│ [[1],[2],[3]] │ +│ [[4],[5],[6]] │ +└───────────────┘ +``` + ## LineAsString {#lineasstring} In this format, every line of input data is interpreted as a single string value. This format can only be parsed for table with a single field of type [String](/docs/en/sql-reference/data-types/string.md). The remaining columns must be set to [DEFAULT](/docs/en/sql-reference/statements/create/table.md/#default) or [MATERIALIZED](/docs/en/sql-reference/statements/create/table.md/#materialized), or omitted. diff --git a/docs/en/interfaces/http.md b/docs/en/interfaces/http.md index 0e2c0c00e4c..63f75fb7830 100644 --- a/docs/en/interfaces/http.md +++ b/docs/en/interfaces/http.md @@ -438,7 +438,7 @@ $ curl -v 'http://localhost:8123/predefined_query' < X-ClickHouse-Query-Id: 96fe0052-01e6-43ce-b12a-6b7370de6e8a < X-ClickHouse-Format: Template < X-ClickHouse-Timezone: Asia/Shanghai -< Keep-Alive: timeout=3 +< Keep-Alive: timeout=10 < X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"} < # HELP "Query" "Number of executing queries" @@ -603,7 +603,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/hi' < Connection: Keep-Alive < Content-Type: text/html; charset=UTF-8 < Transfer-Encoding: chunked -< Keep-Alive: timeout=3 +< Keep-Alive: timeout=10 < X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"} < * Connection #0 to host localhost left intact @@ -643,7 +643,7 @@ $ curl -v -H 'XXX:xxx' 'http://localhost:8123/get_config_static_handler' < Connection: Keep-Alive < Content-Type: text/plain; charset=UTF-8 < Transfer-Encoding: chunked -< Keep-Alive: timeout=3 +< Keep-Alive: timeout=10 < X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"} < * Connection #0 to host localhost left intact @@ -695,7 +695,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/get_absolute_path_static_handler' < Connection: Keep-Alive < Content-Type: text/html; charset=UTF-8 < Transfer-Encoding: chunked -< Keep-Alive: timeout=3 +< Keep-Alive: timeout=10 < X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"} < Absolute Path File @@ -714,7 +714,7 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/get_relative_path_static_handler' < Connection: Keep-Alive < Content-Type: text/html; charset=UTF-8 < Transfer-Encoding: chunked -< Keep-Alive: timeout=3 +< Keep-Alive: timeout=10 < X-ClickHouse-Summary: {"read_rows":"0","read_bytes":"0","written_rows":"0","written_bytes":"0","total_rows_to_read":"0","elapsed_ns":"662334"} < Relative Path File diff --git 
a/docs/en/interfaces/third-party/client-libraries.md b/docs/en/interfaces/third-party/client-libraries.md index 7b5c4f27a2a..5aa634785aa 100644 --- a/docs/en/interfaces/third-party/client-libraries.md +++ b/docs/en/interfaces/third-party/client-libraries.md @@ -74,6 +74,7 @@ ClickHouse Inc does **not** maintain the libraries listed below and hasn’t don ### Elixir - [clickhousex](https://github.com/appodeal/clickhousex/) - [pillar](https://github.com/sofakingworld/pillar) + - [ecto_ch](https://github.com/plausible/ecto_ch) ### Nim - [nim-clickhouse](https://github.com/leonardoce/nim-clickhouse) ### Haskell diff --git a/docs/en/operations/_troubleshooting.md b/docs/en/operations/_troubleshooting.md index dbb0dad7976..b3846643e7a 100644 --- a/docs/en/operations/_troubleshooting.md +++ b/docs/en/operations/_troubleshooting.md @@ -17,12 +17,8 @@ - The issue may be happened when the GPG key is changed. -Please use the following scripts to resolve the issue: +Please use the manual from the [setup](../getting-started/install.md#setup-the-debian-repository) page to update the repository configuration. -```bash -sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754 -sudo apt-get update -``` ### You Get Different Warnings with `apt-get update` {#you-get-different-warnings-with-apt-get-update} diff --git a/docs/en/operations/named-collections.md b/docs/en/operations/named-collections.md index 6ed72152c1e..06c05929ffa 100644 --- a/docs/en/operations/named-collections.md +++ b/docs/en/operations/named-collections.md @@ -18,7 +18,15 @@ function, table engine, database, etc. In the examples below the parameter list linked to for each type. Parameters set in a named collection can be overridden in SQL, this is shown in the examples -below. +below. This ability can be limited using `[NOT] OVERRIDABLE` keywords and XML attributes +and/or the configuration option `allow_named_collection_override_by_default`. + +:::warning +If override is allowed, it may be possible for users without administrative access to +figure out the credentials that you are trying to hide. +If you are using named collections with that purpose, you should disable +`allow_named_collection_override_by_default` (which is enabled by default). +::: ## Storing named collections in the system database @@ -26,11 +34,17 @@ below. ```sql CREATE NAMED COLLECTION name AS -key_1 = 'value', -key_2 = 'value2', +key_1 = 'value' OVERRIDABLE, +key_2 = 'value2' NOT OVERRIDABLE, url = 'https://connection.url/' ``` +In the above example: + + * `key_1` can always be overridden. + * `key_2` can never be overridden. + * `url` can be overridden or not depending on the value of `allow_named_collection_override_by_default`. + ### Permissions to create named collections with DDL To manage named collections with DDL a user must have the `named_control_collection` privilege. This can be assigned by adding a file to `/etc/clickhouse-server/users.d/`. The example gives the user `default` both the `access_management` and `named_collection_control` privileges: @@ -61,25 +75,37 @@ In the above example the `password_sha256_hex` value is the hexadecimal represen - value - value_2 + value + value_2 https://connection.url/ ``` +In the above example: + + * `key_1` can always be overridden. + * `key_2` can never be overridden. + * `url` can be overridden or not depending on the value of `allow_named_collection_override_by_default`. 
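To illustrate what overriding looks like in practice, here is a hedged sketch; the collection name `s3_creds`, its keys, and the URL are hypothetical, and the override succeeds only for keys that are (explicitly or by default) overridable:

```sql
-- Assumed collection, not taken from the original page:
--   CREATE NAMED COLLECTION s3_creds AS
--       url = 'https://bucket.s3.amazonaws.com/data/' OVERRIDABLE,
--       access_key_id = '...' NOT OVERRIDABLE,
--       secret_access_key = '...' NOT OVERRIDABLE;

-- The overridable `url` key is replaced for this query only;
-- overriding a NOT OVERRIDABLE key would be rejected instead.
SELECT count()
FROM s3(s3_creds, url = 'https://bucket.s3.amazonaws.com/other/data.parquet');
```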
+ ## Modifying named collections Named collections that are created with DDL queries can be altered or dropped with DDL. Named collections created with XML files can be managed by editing or deleting the corresponding XML. ### Alter a DDL named collection -Change or add the keys `key1` and `key3` of the collection `collection2`: +Change or add the keys `key1` and `key3` of the collection `collection2` +(this will not change the value of the `overridable` flag for those keys): ```sql ALTER NAMED COLLECTION collection2 SET key1=4, key3='value3' ``` +Change or add the key `key1` and allow it to be always overridden: +```sql +ALTER NAMED COLLECTION collection2 SET key1=4 OVERRIDABLE +``` + Remove the key `key2` from `collection2`: ```sql ALTER NAMED COLLECTION collection2 DELETE key2 @@ -90,6 +116,13 @@ Change or add the key `key1` and delete the key `key3` of the collection `collec ALTER NAMED COLLECTION collection2 SET key1=4, DELETE key3 ``` +To force a key to use the default settings for the `overridable` flag, you have to +remove and re-add the key. +```sql +ALTER NAMED COLLECTION collection2 DELETE key1; +ALTER NAMED COLLECTION collection2 SET key1=4; +``` + ### Drop the DDL named collection `collection2`: ```sql DROP NAMED COLLECTION collection2 diff --git a/docs/en/operations/optimizing-performance/profile-guided-optimization.md b/docs/en/operations/optimizing-performance/profile-guided-optimization.md index cda21e3c604..3d36bb2cc14 100644 --- a/docs/en/operations/optimizing-performance/profile-guided-optimization.md +++ b/docs/en/operations/optimizing-performance/profile-guided-optimization.md @@ -1,5 +1,4 @@ --- -slug: /en/operations/optimizing-performance/profile-guided-optimization sidebar_position: 54 sidebar_label: Profile Guided Optimization (PGO) --- diff --git a/docs/en/operations/optimizing-performance/sampling-query-profiler.md b/docs/en/operations/optimizing-performance/sampling-query-profiler.md index 9988bfc44bc..206f710734e 100644 --- a/docs/en/operations/optimizing-performance/sampling-query-profiler.md +++ b/docs/en/operations/optimizing-performance/sampling-query-profiler.md @@ -11,7 +11,8 @@ ClickHouse runs sampling profiler that allows analyzing query execution. Using p Query profiler is automatically enabled in ClickHouse Cloud and you can run a sample query as follows -:::note If you are running the following query in ClickHouse Cloud, make sure to change `FROM system.trace_log` to `FROM clusterAllReplicas(default, system.trace_log)` to select from all nodes of the cluster ::: +:::note If you are running the following query in ClickHouse Cloud, make sure to change `FROM system.trace_log` to `FROM clusterAllReplicas(default, system.trace_log)` to select from all nodes of the cluster +::: ``` sql SELECT diff --git a/docs/en/operations/query-cache.md b/docs/en/operations/query-cache.md index 665ae6cdfdc..def0f48b968 100644 --- a/docs/en/operations/query-cache.md +++ b/docs/en/operations/query-cache.md @@ -169,7 +169,12 @@ Also, results of queries with non-deterministic functions are not cached by defa [`getMacro()`](../sql-reference/functions/other-functions.md#getMacro) etc. To force caching of results of queries with non-deterministic functions regardless, use setting -[query_cache_store_results_of_queries_with_nondeterministic_functions](settings/settings.md#query-cache-store-results-of-queries-with-nondeterministic-functions). +[query_cache_nondeterministic_function_handling](settings/settings.md#query-cache-nondeterministic-function-handling). 
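For illustration, a query that calls `now()` could be cached explicitly with the `'save'` mode; the query below is an arbitrary sketch, not taken from the original page:

```sql
-- Sketch: cache the result of a query that uses a non-deterministic function.
SELECT now() AS ts, count() AS n
FROM numbers(1000000)
SETTINGS use_query_cache = 1,
         query_cache_nondeterministic_function_handling = 'save';
```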
+ +:::note +Prior to ClickHouse v23.11, setting 'query_cache_store_results_of_queries_with_nondeterministic_functions = 0 / 1' controlled whether +results of queries with non-deterministic results were cached. In newer ClickHouse versions, this setting is obsolete and has no effect. +::: Finally, entries in the query cache are not shared between users due to security reasons. For example, user A must not be able to bypass a row policy on a table by running the same query as another user B for whom no such policy exists. However, if necessary, cache entries can diff --git a/docs/en/operations/server-configuration-parameters/settings.md b/docs/en/operations/server-configuration-parameters/settings.md index 2dac78bb10e..2b73c4ec624 100644 --- a/docs/en/operations/server-configuration-parameters/settings.md +++ b/docs/en/operations/server-configuration-parameters/settings.md @@ -74,7 +74,7 @@ The maximum number of threads that will be used for fetching data parts from ano Type: UInt64 -Default: 8 +Default: 16 ## background_merges_mutations_concurrency_ratio @@ -136,7 +136,7 @@ The maximum number of threads that will be used for constantly executing some li Type: UInt64 -Default: 128 +Default: 512 ## backup_threads @@ -214,7 +214,7 @@ Max consecutive resolving failures before dropping a host from ClickHouse DNS ca Type: UInt32 -Default: 1024 +Default: 10 ## index_mark_cache_policy @@ -961,9 +961,11 @@ See also “[Executable User Defined Functions](../../sql-reference/functions/in Lazy loading of dictionaries. -If `true`, then each dictionary is created on first use. If dictionary creation failed, the function that was using the dictionary throws an exception. +If `true`, then each dictionary is loaded on the first use. If the loading is failed, the function that was using the dictionary throws an exception. -If `false`, all dictionaries are created when the server starts, if the dictionary or dictionaries are created too long or are created with errors, then the server boots without of these dictionaries and continues to try to create these dictionaries. +If `false`, then the server loads all dictionaries at startup. +The server will wait at startup until all the dictionaries finish their loading before receiving any connections +(exception: if `wait_dictionaries_load_at_startup` is set to `false` - see below). The default is `true`. @@ -1823,6 +1825,10 @@ The trailing slash is mandatory. ## Prometheus {#prometheus} +:::note +ClickHouse Cloud does not currently support connecting to Prometheus. To be notified when this feature is supported, please contact support@clickhouse.com. +::: + Exposing metrics data for scraping from [Prometheus](https://prometheus.io). Settings: @@ -2387,6 +2393,28 @@ Path to the file that contains: users.xml ``` +## wait_dictionaries_load_at_startup {#wait_dictionaries_load_at_startup} + +This setting allows to specify behavior if `dictionaries_lazy_load` is `false`. +(If `dictionaries_lazy_load` is `true` this setting doesn't affect anything.) + +If `wait_dictionaries_load_at_startup` is `false`, then the server +will start loading all the dictionaries at startup and it will receive connections in parallel with that loading. +When a dictionary is used in a query for the first time then the query will wait until the dictionary is loaded if it's not loaded yet. +Setting `wait_dictionaries_load_at_startup` to `false` can make ClickHouse start faster, however some queries can be executed slower +(because they will have to wait for some dictionaries to be loaded). 
+ +If `wait_dictionaries_load_at_startup` is `true`, then the server will wait at startup +until all the dictionaries finish their loading (successfully or not) before receiving any connections. + +The default is `true`. + +**Example** + +``` xml +true +``` + ## zookeeper {#server-settings_zookeeper} Contains settings that allow ClickHouse to interact with a [ZooKeeper](http://zookeeper.apache.org/) cluster. @@ -2423,6 +2451,8 @@ This section contains the following parameters: * hostname_levenshtein_distance - just like nearest_hostname, but it compares hostname in a levenshtein distance manner. * first_or_random - selects the first ZooKeeper node, if it's not available then randomly selects one of remaining ZooKeeper nodes. * round_robin - selects the first ZooKeeper node, if reconnection happens selects the next. +- `use_compression` — If set to true, enables compression in Keeper protocol. + **Example configuration** @@ -2712,7 +2742,7 @@ ClickHouse will use it to form the proxy URI using the following template: `{pro 10 - + http://resolver:8080/hostname @@ -2758,3 +2788,7 @@ Proxy settings are determined in the following order: ClickHouse will check the highest priority resolver type for the request protocol. If it is not defined, it will check the next highest priority resolver type, until it reaches the environment resolver. This also allows a mix of resolver types can be used. + +### disable_tunneling_for_https_requests_over_http_proxy {#disable_tunneling_for_https_requests_over_http_proxy} + +By default, tunneling (i.e, `HTTP CONNECT`) is used to make `HTTPS` requests over `HTTP` proxy. This setting can be used to disable it. diff --git a/docs/en/operations/settings/query-complexity.md b/docs/en/operations/settings/query-complexity.md index 15f39b53e07..9e36aa26946 100644 --- a/docs/en/operations/settings/query-complexity.md +++ b/docs/en/operations/settings/query-complexity.md @@ -106,6 +106,15 @@ Possible values: Default value: 0. +## max_bytes_before_external_sort {#settings-max_bytes_before_external_sort} + +Enables or disables execution of `ORDER BY` clauses in external memory. See [ORDER BY Implementation Details](../../sql-reference/statements/select/order-by.md#implementation-details) + +- Maximum volume of RAM (in bytes) that can be used by the single [ORDER BY](../../sql-reference/statements/select/order-by.md) operation. Recommended value is half of available system memory +- 0 — `ORDER BY` in external memory disabled. + +Default value: 0. + ## max_rows_to_sort {#max-rows-to-sort} A maximum number of rows before sorting. This allows you to limit memory consumption when sorting. @@ -163,7 +172,27 @@ If you set `timeout_before_checking_execution_speed `to 0, ClickHouse will use c ## timeout_overflow_mode {#timeout-overflow-mode} -What to do if the query is run longer than ‘max_execution_time’: ‘throw’ or ‘break’. By default, throw. +What to do if the query is run longer than `max_execution_time`: `throw` or `break`. By default, `throw`. + +# max_execution_time_leaf + +Similar semantic to `max_execution_time` but only apply on leaf node for distributed or remote queries. 
+ +For example, if we want to limit execution time on leaf node to `10s` but no limit on the initial node, instead of having `max_execution_time` in the nested subquery settings: + +``` sql +SELECT count() FROM cluster(cluster, view(SELECT * FROM t SETTINGS max_execution_time = 10)); +``` + +We can use `max_execution_time_leaf` as the query settings: + +``` sql +SELECT count() FROM cluster(cluster, view(SELECT * FROM t)) SETTINGS max_execution_time_leaf = 10; +``` + +# timeout_overflow_mode_leaf + +What to do when the query in leaf node run longer than `max_execution_time_leaf`: `throw` or `break`. By default, `throw`. ## min_execution_speed {#min-execution-speed} diff --git a/docs/en/operations/settings/settings-formats.md b/docs/en/operations/settings/settings-formats.md index bb59402079e..344e6dda680 100644 --- a/docs/en/operations/settings/settings-formats.md +++ b/docs/en/operations/settings/settings-formats.md @@ -897,6 +897,12 @@ Use DOS/Windows-style line separator (CRLF) in CSV instead of Unix style (LF). Disabled by default. +### input_format_csv_allow_cr_end_of_line {#input_format_csv_allow_cr_end_of_line} + +If it is set true, CR(\\r) will be allowed at end of line not followed by LF(\\n) + +Disabled by default. + ### input_format_csv_enum_as_number {#input_format_csv_enum_as_number} When enabled, always treat enum values as enum ids for CSV input format. It's recommended to enable this setting if data contains only enum ids to optimize enum parsing. diff --git a/docs/en/operations/settings/settings.md b/docs/en/operations/settings/settings.md index ccf290c8e20..663572d91c8 100644 --- a/docs/en/operations/settings/settings.md +++ b/docs/en/operations/settings/settings.md @@ -731,11 +731,13 @@ Default value: LZ4. ## max_block_size {#setting-max_block_size} -In ClickHouse, data is processed by blocks (sets of column parts). The internal processing cycles for a single block are efficient enough, but there are noticeable expenditures on each block. The `max_block_size` setting is a recommendation for what size of the block (in a count of rows) to load from tables. The block size shouldn’t be too small, so that the expenditures on each block are still noticeable, but not too large so that the query with LIMIT that is completed after the first block is processed quickly. The goal is to avoid consuming too much memory when extracting a large number of columns in multiple threads and to preserve at least some cache locality. +In ClickHouse, data is processed by blocks, which are sets of column parts. The internal processing cycles for a single block are efficient but there are noticeable costs when processing each block. -Default value: 65,536. +The `max_block_size` setting indicates the recommended maximum number of rows to include in a single block when loading data from tables. Blocks the size of `max_block_size` are not always loaded from the table: if ClickHouse determines that less data needs to be retrieved, a smaller block is processed. -Blocks the size of `max_block_size` are not always loaded from the table. If it is obvious that less data needs to be retrieved, a smaller block is processed. +The block size should not be too small to avoid noticeable costs when processing each block. It should also not be too large to ensure that queries with a LIMIT clause execute quickly after processing the first block. 
When setting `max_block_size`, the goal should be to avoid consuming too much memory when extracting a large number of columns in multiple threads and to preserve at least some cache locality. + +Default value: `65,409` ## preferred_block_size_bytes {#preferred-block-size-bytes} @@ -1657,16 +1659,17 @@ Possible values: Default value: `1`. -## query_cache_store_results_of_queries_with_nondeterministic_functions {#query-cache-store-results-of-queries-with-nondeterministic-functions} +## query_cache_nondeterministic_function_handling {#query-cache-nondeterministic-function-handling} -If turned on, then results of `SELECT` queries with non-deterministic functions (e.g. `rand()`, `now()`) can be cached in the [query cache](../query-cache.md). +Controls how the [query cache](../query-cache.md) handles `SELECT` queries with non-deterministic functions like `rand()` or `now()`. Possible values: -- 0 - Disabled -- 1 - Enabled +- `'throw'` - Throw an exception and don't cache the query result. +- `'save'` - Cache the query result. +- `'ignore'` - Don't cache the query result and don't throw an exception. -Default value: `0`. +Default value: `throw`. ## query_cache_min_query_runs {#query-cache-min-query-runs} @@ -2473,7 +2476,7 @@ See also: - [distributed_replica_error_cap](#distributed_replica_error_cap) - [distributed_replica_error_half_life](#settings-distributed_replica_error_half_life) -## distributed_directory_monitor_sleep_time_ms {#distributed_directory_monitor_sleep_time_ms} +## distributed_background_insert_sleep_time_ms {#distributed_background_insert_sleep_time_ms} Base interval for the [Distributed](../../engines/table-engines/special/distributed.md) table engine to send data. The actual interval grows exponentially in the event of errors. @@ -2483,9 +2486,9 @@ Possible values: Default value: 100 milliseconds. -## distributed_directory_monitor_max_sleep_time_ms {#distributed_directory_monitor_max_sleep_time_ms} +## distributed_background_insert_max_sleep_time_ms {#distributed_background_insert_max_sleep_time_ms} -Maximum interval for the [Distributed](../../engines/table-engines/special/distributed.md) table engine to send data. Limits exponential growth of the interval set in the [distributed_directory_monitor_sleep_time_ms](#distributed_directory_monitor_sleep_time_ms) setting. +Maximum interval for the [Distributed](../../engines/table-engines/special/distributed.md) table engine to send data. Limits exponential growth of the interval set in the [distributed_background_insert_sleep_time_ms](#distributed_background_insert_sleep_time_ms) setting. Possible values: @@ -2493,7 +2496,7 @@ Possible values: Default value: 30000 milliseconds (30 seconds). -## distributed_directory_monitor_batch_inserts {#distributed_directory_monitor_batch_inserts} +## distributed_background_insert_batch {#distributed_background_insert_batch} Enables/disables inserted data sending in batches. @@ -2506,13 +2509,13 @@ Possible values: Default value: 0. -## distributed_directory_monitor_split_batch_on_failure {#distributed_directory_monitor_split_batch_on_failure} +## distributed_background_insert_split_batch_on_failure {#distributed_background_insert_split_batch_on_failure} Enables/disables splitting batches on failures. Sometimes sending particular batch to the remote shard may fail, because of some complex pipeline after (i.e. `MATERIALIZED VIEW` with `GROUP BY`) due to `Memory limit exceeded` or similar errors. 
In this case, retrying will not help (and this will stuck distributed sends for the table) but sending files from that batch one by one may succeed INSERT. -So installing this setting to `1` will disable batching for such batches (i.e. temporary disables `distributed_directory_monitor_batch_inserts` for failed batches). +So installing this setting to `1` will disable batching for such batches (i.e. temporary disables `distributed_background_insert_batch` for failed batches). Possible values: @@ -2695,15 +2698,15 @@ Possible values: Default value: 0. -## insert_distributed_sync {#insert_distributed_sync} +## distributed_foreground_insert {#distributed_foreground_insert} Enables or disables synchronous data insertion into a [Distributed](../../engines/table-engines/special/distributed.md/#distributed) table. -By default, when inserting data into a `Distributed` table, the ClickHouse server sends data to cluster nodes in asynchronous mode. When `insert_distributed_sync=1`, the data is processed synchronously, and the `INSERT` operation succeeds only after all the data is saved on all shards (at least one replica for each shard if `internal_replication` is true). +By default, when inserting data into a `Distributed` table, the ClickHouse server sends data to cluster nodes in background mode. When `distributed_foreground_insert=1`, the data is processed synchronously, and the `INSERT` operation succeeds only after all the data is saved on all shards (at least one replica for each shard if `internal_replication` is true). Possible values: -- 0 — Data is inserted in asynchronous mode. +- 0 — Data is inserted in background mode. - 1 — Data is inserted in synchronous mode. Default value: `0`. @@ -2713,6 +2716,10 @@ Default value: `0`. - [Distributed Table Engine](../../engines/table-engines/special/distributed.md/#distributed) - [Managing Distributed Tables](../../sql-reference/statements/system.md/#query-language-system-distributed) +## insert_distributed_sync {#insert_distributed_sync} + +Alias for [`distributed_foreground_insert`](#distributed_foreground_insert). + ## insert_shard_id {#insert_shard_id} If not `0`, specifies the shard of [Distributed](../../engines/table-engines/special/distributed.md/#distributed) table into which the data will be inserted synchronously. @@ -2762,7 +2769,7 @@ Result: ## use_compact_format_in_distributed_parts_names {#use_compact_format_in_distributed_parts_names} -Uses compact format for storing blocks for async (`insert_distributed_sync`) INSERT into tables with `Distributed` engine. +Uses compact format for storing blocks for background (`distributed_foreground_insert`) INSERT into tables with `Distributed` engine. Possible values: @@ -2772,7 +2779,7 @@ Possible values: Default value: `1`. :::note -- with `use_compact_format_in_distributed_parts_names=0` changes from cluster definition will not be applied for async INSERT. +- with `use_compact_format_in_distributed_parts_names=0` changes from cluster definition will not be applied for background INSERT. - with `use_compact_format_in_distributed_parts_names=1` changing the order of the nodes in the cluster definition, will change the `shard_index`/`replica_index` so be aware. ::: @@ -3310,22 +3317,11 @@ Possible values: Default value: `0`. -## use_mysql_types_in_show_columns {#use_mysql_types_in_show_columns} - -Show the names of MySQL data types corresponding to ClickHouse data types in [SHOW COLUMNS](../../sql-reference/statements/show.md#show_columns). 
- -Possible values: - -- 0 - Show names of native ClickHouse data types. -- 1 - Show names of MySQL data types corresponding to ClickHouse data types. - -Default value: `0`. - ## mysql_map_string_to_text_in_show_columns {#mysql_map_string_to_text_in_show_columns} When enabled, [String](../../sql-reference/data-types/string.md) ClickHouse data type will be displayed as `TEXT` in [SHOW COLUMNS](../../sql-reference/statements/show.md#show_columns). -Has effect only when [use_mysql_types_in_show_columns](#use_mysql_types_in_show_columns) is enabled. +Has an effect only when the connection is made through the MySQL wire protocol. - 0 - Use `BLOB`. - 1 - Use `TEXT`. @@ -3336,7 +3332,7 @@ Default value: `0`. When enabled, [FixedString](../../sql-reference/data-types/fixedstring.md) ClickHouse data type will be displayed as `TEXT` in [SHOW COLUMNS](../../sql-reference/statements/show.md#show_columns). -Has effect only when [use_mysql_types_in_show_columns](#use_mysql_types_in_show_columns) is enabled. +Has an effect only when the connection is made through the MySQL wire protocol. - 0 - Use `BLOB`. - 1 - Use `TEXT`. @@ -3944,6 +3940,27 @@ Possible values: Default value: `0`. +## force_optimize_projection_name {#force-optimize-projection_name} + +If it is set to a non-empty string, check that this projection is used in the query at least once. + +Possible values: + +- string: name of projection that used in a query + +Default value: `''`. + +## preferred_optimize_projection_name {#preferred_optimize_projection_name} + +If it is set to a non-empty string, ClickHouse will try to apply specified projection in query. + + +Possible values: + +- string: name of preferred projection + +Default value: `''`. + ## alter_sync {#alter-sync} Allows to set up waiting for actions to be executed on replicas by [ALTER](../../sql-reference/statements/alter/index.md), [OPTIMIZE](../../sql-reference/statements/optimize.md) or [TRUNCATE](../../sql-reference/statements/truncate.md) queries. @@ -3956,6 +3973,10 @@ Possible values: Default value: `1`. +:::note +`alter_sync` is applicable to `Replicated` tables only, it does nothing to alters of not `Replicated` tables. +::: + ## replication_wait_for_inactive_replica_timeout {#replication-wait-for-inactive-replica-timeout} Specifies how long (in seconds) to wait for inactive replicas to execute [ALTER](../../sql-reference/statements/alter/index.md), [OPTIMIZE](../../sql-reference/statements/optimize.md) or [TRUNCATE](../../sql-reference/statements/truncate.md) queries. @@ -4780,6 +4801,247 @@ a Tuple( ) ``` +## analyze_index_with_space_filling_curves + +If a table has a space-filling curve in its index, e.g. `ORDER BY mortonEncode(x, y)`, and the query has conditions on its arguments, e.g. `x >= 10 AND x <= 20 AND y >= 20 AND y <= 30`, use the space-filling curve for index analysis. + +## query_plan_enable_optimizations {#query_plan_enable_optimizations} + +Toggles query optimization at the query plan level. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable all optimizations at the query plan level +- 1 - Enable optimizations at the query plan level (but individual optimizations may still be disabled via their individual settings) + +Default value: `1`. 
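+
+As a minimal sketch of how this can be inspected (the table `t` below is hypothetical), the plans produced with and without plan-level optimizations can be compared via `EXPLAIN`:
+
+``` sql
+-- Hypothetical table `t`; compare the two plans to see which optimizations were applied.
+EXPLAIN PLAN SELECT a FROM t WHERE a > 0 LIMIT 1 SETTINGS query_plan_enable_optimizations = 1;
+EXPLAIN PLAN SELECT a FROM t WHERE a > 0 LIMIT 1 SETTINGS query_plan_enable_optimizations = 0;
+```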
+ +## query_plan_max_optimizations_to_apply + +Limits the total number of optimizations applied to query plan, see setting [query_plan_enable_optimizations](#query_plan_enable_optimizations). +Useful to avoid long optimization times for complex queries. +If the actual number of optimizations exceeds this setting, an exception is thrown. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Type: [UInt64](../../sql-reference/data-types/int-uint.md). + +Default value: '10000' + +## query_plan_lift_up_array_join + +Toggles a query-plan-level optimization which moves ARRAY JOINs up in the execution plan. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_push_down_limit + +Toggles a query-plan-level optimization which moves LIMITs down in the execution plan. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_split_filter + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Toggles a query-plan-level optimization which splits filters into expressions. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_merge_expressions + +Toggles a query-plan-level optimization which merges consecutive filters. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_filter_push_down + +Toggles a query-plan-level optimization which moves filters down in the execution plan. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_execute_functions_after_sorting + +Toggles a query-plan-level optimization which moves expressions after sorting steps. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. 
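+
+The `query_plan_max_optimizations_to_apply` limit described above can be probed directly; with an artificially low cap even a simple query may fail with the documented exception (this is only an illustrative sketch):
+
+``` sql
+-- May throw an exception if the plan would need more optimizations than allowed.
+SELECT number FROM numbers(10) WHERE number % 2 = 0 ORDER BY number LIMIT 3
+SETTINGS query_plan_max_optimizations_to_apply = 1;
+```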
+ +## query_plan_reuse_storage_ordering_for_window_functions + +Toggles a query-plan-level optimization which uses storage sorting when sorting for window functions. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_lift_up_union + +Toggles a query-plan-level optimization which moves larger subtrees of the query plan into union to enable further optimizations. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_distinct_in_order + +Toggles the distinct in-order optimization query-plan-level optimization. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_read_in_order + +Toggles the read in-order optimization query-plan-level optimization. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_aggregation_in_order + +Toggles the aggregation in-order query-plan-level optimization. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `0`. + +## query_plan_remove_redundant_sorting + +Toggles a query-plan-level optimization which removes redundant sorting steps, e.g. in subqueries. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. + +## query_plan_remove_redundant_distinct + +Toggles a query-plan-level optimization which removes redundant DISTINCT steps. +Only takes effect if setting [query_plan_enable_optimizations](#query_plan_enable_optimizations) is 1. + +:::note +This is an expert-level setting which should only be used for debugging by developers. The setting may change in future in backward-incompatible ways or be removed. +::: + +Possible values: + +- 0 - Disable +- 1 - Enable + +Default value: `1`. 
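+
+For example, with a hypothetical table `t`, the effect of `query_plan_remove_redundant_distinct` can be observed by comparing the plans of a query with a nested `DISTINCT`:
+
+``` sql
+-- With the optimization enabled (the default), the plan should contain only one DISTINCT step.
+EXPLAIN PLAN SELECT DISTINCT * FROM (SELECT DISTINCT * FROM t) SETTINGS query_plan_remove_redundant_distinct = 1;
+EXPLAIN PLAN SELECT DISTINCT * FROM (SELECT DISTINCT * FROM t) SETTINGS query_plan_remove_redundant_distinct = 0;
+```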
+
## dictionary_use_async_executor {#dictionary_use_async_executor}
Execute a pipeline for reading dictionary source in several threads. It's supported only by dictionaries with local CLICKHOUSE source.
@@ -4794,3 +5056,17 @@ LIFETIME(MIN 0 MAX 3600)
LAYOUT(COMPLEX_KEY_HASHED_ARRAY())
SETTINGS(dictionary_use_async_executor=1, max_threads=8);
```
+
+## storage_metadata_write_full_object_key {#storage_metadata_write_full_object_key}
+
+When set to `true`, the metadata files are written with the `VERSION_FULL_OBJECT_KEY` format version. With that format, full object storage key names are written to the metadata files.
+When set to `false`, the metadata files are written with the previous format version, `VERSION_INLINE_DATA`. With that format, only suffixes of object storage key names are written to the metadata files. The prefix for all object storage key names is set in the configuration files in the `storage_configuration.disks` section.
+
+Default value: `false`.
+
+## s3_use_adaptive_timeouts {#s3_use_adaptive_timeouts}
+
+When set to `true`, the first two attempts of each S3 request are made with low send and receive timeouts.
+When set to `false`, all attempts are made with identical timeouts.
+
+Default value: `true`.
diff --git a/docs/en/operations/system-tables/blob_storage_log.md b/docs/en/operations/system-tables/blob_storage_log.md
new file mode 100644
index 00000000000..2328f7f0346
--- /dev/null
+++ b/docs/en/operations/system-tables/blob_storage_log.md
@@ -0,0 +1,59 @@
+---
+slug: /en/operations/system-tables/blob_storage_log
+---
+# blob_storage_log
+
+Contains logging entries with information about various blob storage operations such as uploads and deletes.
+
+Columns:
+
+- `event_date` ([Date](../../sql-reference/data-types/date.md)) — Date of the event.
+- `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Time of the event.
+- `event_time_microseconds` ([DateTime64](../../sql-reference/data-types/datetime64.md)) — Time of the event with microseconds precision.
+- `event_type` ([Enum8](../../sql-reference/data-types/enum.md)) — Type of the event. Possible values:
+  - `'Upload'`
+  - `'Delete'`
+  - `'MultiPartUploadCreate'`
+  - `'MultiPartUploadWrite'`
+  - `'MultiPartUploadComplete'`
+  - `'MultiPartUploadAbort'`
+- `query_id` ([String](../../sql-reference/data-types/string.md)) — Identifier of the query associated with the event, if any.
+- `thread_id` ([UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges)) — Identifier of the thread performing the operation.
+- `thread_name` ([String](../../sql-reference/data-types/string.md)) — Name of the thread performing the operation.
+- `disk_name` ([LowCardinality(String)](../../sql-reference/data-types/lowcardinality.md)) — Name of the associated disk.
+- `bucket` ([String](../../sql-reference/data-types/string.md)) — Name of the bucket.
+- `remote_path` ([String](../../sql-reference/data-types/string.md)) — Path to the remote resource.
+- `local_path` ([String](../../sql-reference/data-types/string.md)) — Path to the metadata file on the local system, which references the remote resource.
+- `data_size` ([UInt32](../../sql-reference/data-types/int-uint.md#uint-ranges)) — Size of the data involved in the upload event.
+- `error` ([String](../../sql-reference/data-types/string.md)) — Error message associated with the event, if any.
+
+**Example**
+
+Suppose a blob storage operation uploads a file, and an event is logged:
+
+```sql
+SELECT * FROM system.blob_storage_log WHERE query_id = '7afe0450-504d-4e4b-9a80-cd9826047972' ORDER BY event_date, event_time_microseconds \G
+```
+
+```text
+Row 1:
+──────
+event_date: 2023-10-31
+event_time: 2023-10-31 16:03:40
+event_time_microseconds: 2023-10-31 16:03:40.481437
+event_type: Upload
+query_id: 7afe0450-504d-4e4b-9a80-cd9826047972
+thread_id: 2381740
+disk_name: disk_s3
+bucket: bucket1
+remote_path: rrr/kxo/tbnqtrghgtnxkzgtcrlutwuslgawe
+local_path: store/654/6549e8b3-d753-4447-8047-d462df6e6dbe/tmp_insert_all_1_1_0/checksums.txt
+data_size: 259
+error:
+```
+
+In this example, the upload operation was associated with the `INSERT` query with ID `7afe0450-504d-4e4b-9a80-cd9826047972`. The local metadata file `store/654/6549e8b3-d753-4447-8047-d462df6e6dbe/tmp_insert_all_1_1_0/checksums.txt` refers to the remote path `rrr/kxo/tbnqtrghgtnxkzgtcrlutwuslgawe` in bucket `bucket1` on disk `disk_s3`, with a size of 259 bytes.
+
+**See Also**
+
+- [External Disks for Storing Data](../../operations/storing-data.md)
diff --git a/docs/en/operations/system-tables/dashboards.md b/docs/en/operations/system-tables/dashboards.md
new file mode 100644
index 00000000000..1d6876b9f8d
--- /dev/null
+++ b/docs/en/operations/system-tables/dashboards.md
@@ -0,0 +1,68 @@
+---
+slug: /en/operations/system-tables/dashboards
+---
+# dashboards
+
+Contains queries used by the `/dashboard` page accessible through the [HTTP interface](/docs/en/interfaces/http.md).
+This table can be useful for monitoring and troubleshooting. The table contains a row for every chart in a dashboard.
+
+:::note
+The `/dashboard` page can render queries not only from `system.dashboards`, but from any table with the same schema.
+This can be useful to create custom dashboards.
+::: + +Example: + +``` sql +SELECT * +FROM system.dashboards +WHERE title ILIKE '%CPU%' +``` + +``` text +Row 1: +────── +dashboard: overview +title: CPU Usage (cores) +query: SELECT toStartOfInterval(event_time, INTERVAL {rounding:UInt32} SECOND)::INT AS t, avg(ProfileEvent_OSCPUVirtualTimeMicroseconds) / 1000000 +FROM system.metric_log +WHERE event_date >= toDate(now() - {seconds:UInt32}) AND event_time >= now() - {seconds:UInt32} +GROUP BY t +ORDER BY t WITH FILL STEP {rounding:UInt32} + +Row 2: +────── +dashboard: overview +title: CPU Wait +query: SELECT toStartOfInterval(event_time, INTERVAL {rounding:UInt32} SECOND)::INT AS t, avg(ProfileEvent_OSCPUWaitMicroseconds) / 1000000 +FROM system.metric_log +WHERE event_date >= toDate(now() - {seconds:UInt32}) AND event_time >= now() - {seconds:UInt32} +GROUP BY t +ORDER BY t WITH FILL STEP {rounding:UInt32} + +Row 3: +────── +dashboard: overview +title: OS CPU Usage (Userspace) +query: SELECT toStartOfInterval(event_time, INTERVAL {rounding:UInt32} SECOND)::INT AS t, avg(value) +FROM system.asynchronous_metric_log +WHERE event_date >= toDate(now() - {seconds:UInt32}) AND event_time >= now() - {seconds:UInt32} AND metric = 'OSUserTimeNormalized' +GROUP BY t +ORDER BY t WITH FILL STEP {rounding:UInt32} + +Row 4: +────── +dashboard: overview +title: OS CPU Usage (Kernel) +query: SELECT toStartOfInterval(event_time, INTERVAL {rounding:UInt32} SECOND)::INT AS t, avg(value) +FROM system.asynchronous_metric_log +WHERE event_date >= toDate(now() - {seconds:UInt32}) AND event_time >= now() - {seconds:UInt32} AND metric = 'OSSystemTimeNormalized' +GROUP BY t +ORDER BY t WITH FILL STEP {rounding:UInt32} +``` + +Columns: + +- `dashboard` (`String`) - The dashboard name. +- `title` (`String`) - The title of a chart. +- `query` (`String`) - The query to obtain data to be displayed. diff --git a/docs/en/operations/system-tables/databases.md b/docs/en/operations/system-tables/databases.md index f3d3d388c36..e3b0ded96e8 100644 --- a/docs/en/operations/system-tables/databases.md +++ b/docs/en/operations/system-tables/databases.md @@ -14,6 +14,7 @@ Columns: - `uuid` ([UUID](../../sql-reference/data-types/uuid.md)) — Database UUID. - `comment` ([String](../../sql-reference/data-types/enum.md)) — Database comment. - `engine_full` ([String](../../sql-reference/data-types/enum.md)) — Parameters of the database engine. +- `database` ([String](../../sql-reference/data-types/string.md)) – Alias for `name`. The `name` column from this system table is used for implementing the `SHOW DATABASES` query. diff --git a/docs/en/operations/system-tables/index.md b/docs/en/operations/system-tables/index.md index df42f80275e..eaf79d035a9 100644 --- a/docs/en/operations/system-tables/index.md +++ b/docs/en/operations/system-tables/index.md @@ -78,6 +78,11 @@ If procfs is supported and enabled on the system, ClickHouse server collects the - `OSReadBytes` - `OSWriteBytes` +:::note +`OSIOWaitMicroseconds` is disabled by default in Linux kernels starting from 5.14.x. 
+You can enable it using `sudo sysctl kernel.task_delayacct=1` or by creating a `.conf` file in `/etc/sysctl.d/` with `kernel.task_delayacct = 1` +::: + ## Related content - Blog: [System Tables and a window into the internals of ClickHouse](https://clickhouse.com/blog/clickhouse-debugging-issues-with-system-tables) diff --git a/docs/en/operations/system-tables/information_schema.md b/docs/en/operations/system-tables/information_schema.md index 8470ac838a4..d3f06f6e719 100644 --- a/docs/en/operations/system-tables/information_schema.md +++ b/docs/en/operations/system-tables/information_schema.md @@ -18,12 +18,14 @@ SHOW TABLES FROM information_schema; │ KEY_COLUMN_USAGE │ │ REFERENTIAL_CONSTRAINTS │ │ SCHEMATA │ +| STATISTICS | │ TABLES │ │ VIEWS │ │ columns │ │ key_column_usage │ │ referential_constraints │ │ schemata │ +| statistics | │ tables │ │ views │ └─────────────────────────┘ @@ -32,11 +34,12 @@ SHOW TABLES FROM information_schema; `INFORMATION_SCHEMA` contains the following views: - [COLUMNS](#columns) -- [SCHEMATA](#schemata) -- [TABLES](#tables) -- [VIEWS](#views) - [KEY_COLUMN_USAGE](#key_column_usage) - [REFERENTIAL_CONSTRAINTS](#referential_constraints) +- [SCHEMATA](#schemata) +- [STATISTICS](#statistics) +- [TABLES](#tables) +- [VIEWS](#views) Case-insensitive equivalent views, e.g. `INFORMATION_SCHEMA.columns` are provided for reasons of compatibility with other databases. The same applies to all the columns in these views - both lowercase (for example, `table_name`) and uppercase (`TABLE_NAME`) variants are provided. @@ -372,3 +375,28 @@ Columns: - `delete_rule` ([String](../../sql-reference/data-types/string.md)) — Currently unused. - `table_name` ([String](../../sql-reference/data-types/string.md)) — Currently unused. - `referenced_table_name` ([String](../../sql-reference/data-types/string.md)) — Currently unused. + +## STATISTICS {#statistics} + +Provides information about table indexes. Currently returns an empty result (no rows) which is just enough to provide compatibility with 3rd party tools like Tableau Online. + +Columns: + +- `table_catalog` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `table_schema` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `table_name` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `non_unique` ([Int32](../../sql-reference/data-types/int-uint.md)) — Currently unused. +- `index_schema` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `index_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. +- `seq_in_index` ([UInt32](../../sql-reference/data-types/int-uint.md)) — Currently unused. +- `column_name` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. +- `collation` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. +- `cardinality` ([Nullable](../../sql-reference/data-types/nullable.md)([Int64](../../sql-reference/data-types/int-uint.md))) — Currently unused. +- `sub_part` ([Nullable](../../sql-reference/data-types/nullable.md)([Int64](../../sql-reference/data-types/int-uint.md))) — Currently unused. +- `packed` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. 
+- `nullable` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `index_type` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `comment` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `index_comment` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `is_visible` ([String](../../sql-reference/data-types/string.md)) — Currently unused. +- `expression` ([Nullable](../../sql-reference/data-types/nullable.md)([String](../../sql-reference/data-types/string.md))) — Currently unused. diff --git a/docs/en/operations/system-tables/stack_trace.md b/docs/en/operations/system-tables/stack_trace.md index 52ee7088597..90f1f47e52f 100644 --- a/docs/en/operations/system-tables/stack_trace.md +++ b/docs/en/operations/system-tables/stack_trace.md @@ -35,27 +35,25 @@ WITH arrayMap(x -> demangle(addressToSymbol(x)), trace) AS all SELECT thread_nam ``` text Row 1: ────── -thread_name: clickhouse-serv - -thread_id: 686 -query_id: 1a11f70b-626d-47c1-b948-f9c7b206395d -res: sigqueue -DB::StorageSystemStackTrace::fillData(std::__1::vector::mutable_ptr, std::__1::allocator::mutable_ptr > >&, DB::Context const&, DB::SelectQueryInfo const&) const -DB::IStorageSystemOneBlock::read(std::__1::vector, std::__1::allocator >, std::__1::allocator, std::__1::allocator > > > const&, DB::SelectQueryInfo const&, DB::Context const&, DB::QueryProcessingStage::Enum, unsigned long, unsigned int) -DB::InterpreterSelectQuery::executeFetchColumns(DB::QueryProcessingStage::Enum, DB::QueryPipeline&, std::__1::shared_ptr const&, std::__1::vector, std::__1::allocator >, std::__1::allocator, std::__1::allocator > > > const&) -DB::InterpreterSelectQuery::executeImpl(DB::QueryPipeline&, std::__1::shared_ptr const&, std::__1::optional) -DB::InterpreterSelectQuery::execute() -DB::InterpreterSelectWithUnionQuery::execute() -DB::executeQueryImpl(char const*, char const*, DB::Context&, bool, DB::QueryProcessingStage::Enum, bool, DB::ReadBuffer*) -DB::executeQuery(std::__1::basic_string, std::__1::allocator > const&, DB::Context&, bool, DB::QueryProcessingStage::Enum, bool) -DB::TCPHandler::runImpl() -DB::TCPHandler::run() -Poco::Net::TCPServerConnection::start() -Poco::Net::TCPServerDispatcher::run() -Poco::PooledThread::run() -Poco::ThreadImpl::runnableEntry(void*) -start_thread -__clone +thread_name: QueryPipelineEx +thread_id: 743490 +query_id: dc55a564-febb-4e37-95bb-090ef182c6f1 +res: memcpy +large_ralloc +arena_ralloc +do_rallocx +Allocator::realloc(void*, unsigned long, unsigned long, unsigned long) +HashTable, HashTableNoState, PairNoInit>, HashCRC32, HashTableGrowerWithPrecalculation<8ul>, Allocator>::resize(unsigned long, unsigned long) +void DB::Aggregator::executeImplBatch, HashTableNoState, PairNoInit>, HashCRC32, HashTableGrowerWithPrecalculation<8ul>, Allocator>, true, false>>(DB::AggregationMethodOneNumber, HashTableNoState, PairNoInit>, HashCRC32, HashTableGrowerWithPrecalculation<8ul>, Allocator>, true, false>&, DB::AggregationMethodOneNumber, HashTableNoState, PairNoInit>, HashCRC32, HashTableGrowerWithPrecalculation<8ul>, Allocator>, true, false>::State&, DB::Arena*, unsigned long, unsigned long, DB::Aggregator::AggregateFunctionInstruction*, bool, char*) const +DB::Aggregator::executeImpl(DB::AggregatedDataVariants&, unsigned long, unsigned long, std::__1::vector>&, DB::Aggregator::AggregateFunctionInstruction*, bool, bool, char*) const +DB::Aggregator::executeOnBlock(std::__1::vector::immutable_ptr, 
std::__1::allocator::immutable_ptr>>, unsigned long, unsigned long, DB::AggregatedDataVariants&, std::__1::vector>&, std::__1::vector>, std::__1::allocator>>>&, bool&) const +DB::AggregatingTransform::work() +DB::ExecutionThreadContext::executeTask() +DB::PipelineExecutor::executeStepImpl(unsigned long, std::__1::atomic*) +void std::__1::__function::__policy_invoker::__call_impl>(std::__1::__function::__policy_storage const*) +ThreadPoolImpl>::worker(std::__1::__list_iterator, void*>) +void std::__1::__function::__policy_invoker::__call_impl::ThreadFromGlobalPoolImpl>::scheduleImpl(std::__1::function, Priority, std::__1::optional, bool)::'lambda0'()>(void&&)::'lambda'(), void ()>>(std::__1::__function::__policy_storage const*) +void* std::__1::__thread_proxy[abi:v15000]>, void ThreadPoolImpl::scheduleImpl(std::__1::function, Priority, std::__1::optional, bool)::'lambda0'()>>(void*) ``` Getting filenames and line numbers in ClickHouse source code: diff --git a/docs/en/operations/system-tables/symbols.md b/docs/en/operations/system-tables/symbols.md new file mode 100644 index 00000000000..5acd3ad51c7 --- /dev/null +++ b/docs/en/operations/system-tables/symbols.md @@ -0,0 +1,35 @@ +--- +slug: /en/operations/system-tables/symbols +--- +# symbols + +Contains information for introspection of `clickhouse` binary. It requires the introspection privilege to access. +This table is only useful for C++ experts and ClickHouse engineers. + +Columns: + +- `symbol` ([String](../../sql-reference/data-types/string.md)) — Symbol name in the binary. It is mangled. You can apply `demangle(symbol)` to obtain a readable name. +- `address_begin` ([UInt64](../../sql-reference/data-types/int-uint.md)) — Start address of the symbol in the binary. +- `address_end` ([UInt64](../../sql-reference/data-types/int-uint.md)) — End address of the symbol in the binary. +- `name` ([String](../../sql-reference/data-types/string.md)) — Alias for `event`. + +**Example** + +``` sql +SELECT address_begin, address_end - address_begin AS size, demangle(symbol) FROM system.symbols ORDER BY size DESC LIMIT 10 +``` + +``` text +┌─address_begin─┬─────size─┬─demangle(symbol)──────────────────────────────────────────────────────────────────┐ +│ 25000976 │ 29466000 │ icudt70_dat │ +│ 400605288 │ 2097272 │ arena_emap_global │ +│ 18760592 │ 1048576 │ CLD2::kQuadChrome1015_2 │ +│ 9807152 │ 884808 │ TopLevelDomainLookupHash::isValid(char const*, unsigned long)::wordlist │ +│ 57442432 │ 850608 │ llvm::X86Insts │ +│ 55682944 │ 681360 │ (anonymous namespace)::X86DAGToDAGISel::SelectCode(llvm::SDNode*)::MatcherTable │ +│ 55130368 │ 502840 │ (anonymous namespace)::X86InstructionSelector::getMatchTable() const::MatchTable0 │ +│ 402930616 │ 404032 │ qpl::ml::dispatcher::hw_dispatcher::get_instance()::instance │ +│ 274131872 │ 356795 │ DB::SettingsTraits::Accessor::instance()::$_0::operator()() const │ +│ 58293040 │ 249424 │ llvm::X86InstrNameData │ +└───────────────┴──────────┴───────────────────────────────────────────────────────────────────────────────────┘ +``` diff --git a/docs/en/operations/utilities/backupview.md b/docs/en/operations/utilities/backupview.md new file mode 100644 index 00000000000..c4f1cc0ae71 --- /dev/null +++ b/docs/en/operations/utilities/backupview.md @@ -0,0 +1,50 @@ +--- +slug: /en/operations/utilities/backupview +title: clickhouse_backupview +--- + +# clickhouse_backupview {#clickhouse_backupview} + +Python module to help analyzing backups made by the [BACKUP](https://clickhouse.com/docs/en/operations/backup) command. 
+The main motivation was to allows getting some information from a backup without actually restoring it. + +This module provides functions to +- enumerate files contained in a backup +- read files from a backup +- get useful information in readable form about databases, tables, parts contained in a backup +- check integrity of a backup + +## Example: + +```python +from clickhouse_backupview import open_backup, S3, FileInfo + +# Open a backup. We could also use a local path: +# backup = open_backup("/backups/my_backup_1/") +backup = open_backup(S3("uri", "access_key_id", "secret_access_key")) + +# Get a list of databasess inside the backup. +print(backup.get_databases())) + +# Get a list of tables inside the backup, +# and for each table its create query and a list of parts and partitions. +for db in backup.get_databases(): + for tbl in backup.get_tables(database=db): + print(backup.get_create_query(database=db, table=tbl)) + print(backup.get_partitions(database=db, table=tbl)) + print(backup.get_parts(database=db, table=tbl)) + +# Extract everything from the backup. +backup.extract_all(table="mydb.mytable", out='/tmp/my_backup_1/all/') + +# Extract the data of a specific table. +backup.extract_table_data(table="mydb.mytable", out='/tmp/my_backup_1/mytable/') + +# Extract a single partition. +backup.extract_table_data(table="mydb.mytable", partition="202201", out='/tmp/my_backup_1/202201/') + +# Extract a single part. +backup.extract_table_data(table="mydb.mytable", part="202201_100_200_3", out='/tmp/my_backup_1/202201_100_200_3/') +``` + +For more examples see the [test](https://github.com/ClickHouse/ClickHouse/blob/master/utils/backupview/test/test.py). diff --git a/docs/en/operations/utilities/clickhouse-copier.md b/docs/en/operations/utilities/clickhouse-copier.md index 473c1c628d1..0d329487504 100644 --- a/docs/en/operations/utilities/clickhouse-copier.md +++ b/docs/en/operations/utilities/clickhouse-copier.md @@ -115,7 +115,7 @@ Parameters: 3 - 1 + 1 - 1 + 1 - 1 + 1 - + + /usr/share/clickhouse/protos/ #include +#include #include #include #include @@ -24,6 +25,7 @@ namespace CurrentMetrics { extern const Metric LocalThread; extern const Metric LocalThreadActive; + extern const Metric LocalThreadScheduled; } namespace DB @@ -59,7 +61,7 @@ void ClusterCopier::init() getContext()->setClustersConfig(task_cluster_current_config, false, task_cluster->clusters_prefix); /// Set up shards and their priority - task_cluster->random_engine.seed(task_cluster->random_device()); + task_cluster->random_engine.seed(randomSeed()); for (auto & task_table : task_cluster->table_tasks) { task_table.cluster_pull = getContext()->getCluster(task_table.cluster_pull_name); @@ -199,7 +201,7 @@ void ClusterCopier::discoverTablePartitions(const ConnectionTimeouts & timeouts, { /// Fetch partitions list from a shard { - ThreadPool thread_pool(CurrentMetrics::LocalThread, CurrentMetrics::LocalThreadActive, num_threads ? num_threads : 2 * getNumberOfPhysicalCPUCores()); + ThreadPool thread_pool(CurrentMetrics::LocalThread, CurrentMetrics::LocalThreadActive, CurrentMetrics::LocalThreadScheduled, num_threads ? 
num_threads : 2 * getNumberOfPhysicalCPUCores()); for (const TaskShardPtr & task_shard : task_table.all_shards) thread_pool.scheduleOrThrowOnError([this, timeouts, task_shard]() @@ -1406,7 +1408,7 @@ TaskStatus ClusterCopier::processPartitionPieceTaskImpl( /// 3) Create helping table on the whole destination cluster auto & settings_push = task_cluster->settings_push; - auto connection = task_table.cluster_push->getAnyShardInfo().pool->get(timeouts, &settings_push, true); + auto connection = task_table.cluster_push->getAnyShardInfo().pool->get(timeouts, settings_push, true); String create_query = getRemoteCreateTable(task_shard.task_table.table_push, *connection, settings_push); ParserCreateQuery parser_create_query; @@ -1784,7 +1786,7 @@ String ClusterCopier::getRemoteCreateTable(const DatabaseAndTableName & table, C ASTPtr ClusterCopier::getCreateTableForPullShard(const ConnectionTimeouts & timeouts, TaskShard & task_shard) { /// Fetch and parse (possibly) new definition - auto connection_entry = task_shard.info.pool->get(timeouts, &task_cluster->settings_pull, true); + auto connection_entry = task_shard.info.pool->get(timeouts, task_cluster->settings_pull, true); String create_query_pull_str = getRemoteCreateTable( task_shard.task_table.table_pull, *connection_entry, diff --git a/programs/copier/TaskCluster.cpp b/programs/copier/TaskCluster.cpp index b25e9d32320..0fb06616e50 100644 --- a/programs/copier/TaskCluster.cpp +++ b/programs/copier/TaskCluster.cpp @@ -58,7 +58,7 @@ void DB::TaskCluster::reloadSettings(const Poco::Util::AbstractConfiguration & c /// Override important settings settings_pull.readonly = 1; settings_pull.prefer_localhost_replica = false; - settings_push.insert_distributed_sync = true; + settings_push.distributed_foreground_insert = true; settings_push.prefer_localhost_replica = false; set_default_value(settings_pull.load_balancing, LoadBalancing::NEAREST_HOSTNAME); @@ -66,7 +66,7 @@ void DB::TaskCluster::reloadSettings(const Poco::Util::AbstractConfiguration & c set_default_value(settings_pull.max_block_size, 8192UL); set_default_value(settings_pull.preferred_block_size_bytes, 0); - set_default_value(settings_push.insert_distributed_timeout, 0); + set_default_value(settings_push.distributed_background_insert_timeout, 0); set_default_value(settings_push.alter_sync, 2); } diff --git a/programs/copier/TaskCluster.h b/programs/copier/TaskCluster.h index fc1c8a663ec..a7f8bc3baca 100644 --- a/programs/copier/TaskCluster.h +++ b/programs/copier/TaskCluster.h @@ -7,7 +7,7 @@ #include -#include +#include namespace DB { @@ -45,7 +45,6 @@ struct TaskCluster /// Subtasks TasksTable table_tasks; - std::random_device random_device; pcg64 random_engine; }; diff --git a/programs/disks/CommandCopy.cpp b/programs/disks/CommandCopy.cpp index 296fc708411..421e4038d12 100644 --- a/programs/disks/CommandCopy.cpp +++ b/programs/disks/CommandCopy.cpp @@ -57,7 +57,7 @@ public: String relative_path_from = validatePathAndGetAsRelative(path_from); String relative_path_to = validatePathAndGetAsRelative(path_to); - disk_from->copyDirectoryContent(relative_path_from, disk_to, relative_path_to, /* read_settings= */ {}, /* write_settings= */ {}); + disk_from->copyDirectoryContent(relative_path_from, disk_to, relative_path_to, /* read_settings= */ {}, /* write_settings= */ {}, /* cancellation_hook= */ {}); } }; } diff --git a/programs/install/Install.cpp b/programs/install/Install.cpp index e10a9fea86b..9d4d791263b 100644 --- a/programs/install/Install.cpp +++ b/programs/install/Install.cpp @@ 
-420,7 +420,7 @@ int mainEntryClickHouseInstall(int argc, char ** argv) /// Create symlinks. - std::initializer_list tools + std::initializer_list tools { "clickhouse-server", "clickhouse-client", @@ -435,6 +435,9 @@ int mainEntryClickHouseInstall(int argc, char ** argv) "clickhouse-keeper", "clickhouse-keeper-converter", "clickhouse-disks", + "ch", + "chl", + "chc", }; for (const auto & tool : tools) @@ -444,29 +447,39 @@ int mainEntryClickHouseInstall(int argc, char ** argv) if (fs::exists(symlink_path)) { - bool is_symlink = FS::isSymlink(symlink_path); - fs::path points_to; - if (is_symlink) - points_to = fs::weakly_canonical(FS::readSymlink(symlink_path)); - - if (is_symlink && (points_to == main_bin_path || (options.count("link") && points_to == binary_self_canonical_path))) + /// Do not replace short named symlinks if they are already present in the system + /// to avoid collision with other tools. + if (!tool.starts_with("clickhouse")) { + fmt::print("Symlink {} already exists. Will keep it.\n", symlink_path.string()); need_to_create = false; } else { - if (!is_symlink) + bool is_symlink = FS::isSymlink(symlink_path); + fs::path points_to; + if (is_symlink) + points_to = fs::weakly_canonical(FS::readSymlink(symlink_path)); + + if (is_symlink && (points_to == main_bin_path || (options.count("link") && points_to == binary_self_canonical_path))) { - fs::path rename_path = symlink_path.replace_extension(".old"); - fmt::print("File {} already exists but it's not a symlink. Will rename to {}.\n", - symlink_path.string(), rename_path.string()); - fs::rename(symlink_path, rename_path); + need_to_create = false; } - else if (points_to != main_bin_path) + else { - fmt::print("Symlink {} already exists but it points to {}. Will replace the old symlink to {}.\n", - symlink_path.string(), points_to.string(), main_bin_path.string()); - fs::remove(symlink_path); + if (!is_symlink) + { + fs::path rename_path = symlink_path.replace_extension(".old"); + fmt::print("File {} already exists but it's not a symlink. Will rename to {}.\n", + symlink_path.string(), rename_path.string()); + fs::rename(symlink_path, rename_path); + } + else if (points_to != main_bin_path) + { + fmt::print("Symlink {} already exists but it points to {}. Will replace the old symlink to {}.\n", + symlink_path.string(), points_to.string(), main_bin_path.string()); + fs::remove(symlink_path); + } } } } diff --git a/programs/keeper/Keeper.cpp b/programs/keeper/Keeper.cpp index c9a3deb0ec7..e04e669abae 100644 --- a/programs/keeper/Keeper.cpp +++ b/programs/keeper/Keeper.cpp @@ -35,7 +35,7 @@ #include "Core/Defines.h" #include "config.h" -#include "config_version.h" +#include #include "config_tools.h" @@ -50,6 +50,9 @@ #include +#include +/// A minimal file used when the keeper is run without installation +INCBIN(keeper_resource_embedded_xml, SOURCE_DIR "/programs/keeper/keeper_embedded.xml"); int mainEntryClickHouseKeeper(int argc, char ** argv) { @@ -158,6 +161,8 @@ int Keeper::run() void Keeper::initialize(Poco::Util::Application & self) { + ConfigProcessor::registerEmbeddedConfig("keeper_config.xml", std::string_view(reinterpret_cast(gkeeper_resource_embedded_xmlData), gkeeper_resource_embedded_xmlSize)); + BaseDaemon::initialize(self); logger().information("starting up"); @@ -551,7 +556,8 @@ catch (...) { /// Poco does not provide stacktrace. tryLogCurrentException("Application"); - throw; + auto code = getCurrentExceptionCode(); + return code ? 
code : -1; } diff --git a/programs/local/LocalServer.cpp b/programs/local/LocalServer.cpp index 452af7a4ec0..f3b84fa3eb1 100644 --- a/programs/local/LocalServer.cpp +++ b/programs/local/LocalServer.cpp @@ -424,7 +424,7 @@ void LocalServer::setupUsers() void LocalServer::connect() { - connection_parameters = ConnectionParameters(config()); + connection_parameters = ConnectionParameters(config(), "localhost"); connection = LocalConnection::createConnection( connection_parameters, global_context, need_render_progress, need_render_profile_events, server_display_name); } @@ -495,7 +495,7 @@ try processConfig(); adjustSettings(); - initTtyBuffer(toProgressOption(config().getString("progress", "default"))); + initTTYBuffer(toProgressOption(config().getString("progress", "default"))); applyCmdSettings(global_context); @@ -563,9 +563,6 @@ catch (...) void LocalServer::updateLoggerLevel(const String & logs_level) { - if (!logging_initialized) - return; - config().setString("logger.level", logs_level); updateLevels(config(), logger()); } @@ -607,21 +604,13 @@ void LocalServer::processConfig() Poco::AutoPtr pf = new OwnPatternFormatter; Poco::AutoPtr log = new OwnFormattingChannel(pf, new Poco::SimpleFileChannel(server_logs_file)); Poco::Logger::root().setChannel(log); - logging_initialized = true; - } - else if (logging || is_interactive) - { - config().setString("logger", "logger"); - auto log_level_default = is_interactive && !logging ? "none" : level; - config().setString("logger.level", config().getString("log-level", config().getString("send_logs_level", log_level_default))); - buildLoggers(config(), logger(), "clickhouse-local"); - logging_initialized = true; } else { - Poco::Logger::root().setLevel("none"); - Poco::Logger::root().setChannel(Poco::AutoPtr(new Poco::NullChannel())); - logging_initialized = false; + config().setString("logger", "logger"); + auto log_level_default = logging ? level : "fatal"; + config().setString("logger.level", config().getString("log-level", config().getString("send_logs_level", log_level_default))); + buildLoggers(config(), logger(), "clickhouse-local"); } shared_context = Context::createShared(); @@ -763,7 +752,7 @@ void LocalServer::processConfig() { DatabaseCatalog::instance().createBackgroundTasks(); loadMetadata(global_context); - DatabaseCatalog::instance().startupBackgroundCleanup(); + DatabaseCatalog::instance().startupBackgroundTasks(); } /// For ClickHouse local if path is not set the loader will be disabled. 
diff --git a/programs/main.cpp b/programs/main.cpp index 5857e8d5ee4..959984d565d 100644 --- a/programs/main.cpp +++ b/programs/main.cpp @@ -2,15 +2,12 @@ #include #include -#ifdef OS_LINUX -#include -#endif - #include #include #include #include #include +#include #include /// pair #include @@ -22,7 +19,6 @@ #include #include -#include /// Universal executable for various clickhouse applications @@ -98,7 +94,7 @@ using MainFunc = int (*)(int, char**); #if !defined(FUZZING_MODE) /// Add an item here to register new application -std::pair clickhouse_applications[] = +std::pair clickhouse_applications[] = { #if ENABLE_CLICKHOUSE_LOCAL {"local", mainEntryClickHouseLocal}, @@ -158,6 +154,18 @@ std::pair clickhouse_applications[] = #endif }; +/// Add an item here to register a new short name +std::pair clickhouse_short_names[] = +{ +#if ENABLE_CLICKHOUSE_LOCAL + {"ch", "local"}, + {"chl", "local"}, +#endif +#if ENABLE_CLICKHOUSE_CLIENT + {"chc", "client"}, +#endif +}; + int printHelp(int, char **) { std::cerr << "Use one of the following commands:" << std::endl; @@ -387,15 +395,21 @@ void checkHarmfulEnvironmentVariables(char ** argv) } -bool isClickhouseApp(const std::string & app_suffix, std::vector & argv) +bool isClickhouseApp(std::string_view app_suffix, std::vector & argv) { + for (const auto & [alias, name] : clickhouse_short_names) + if (app_suffix == name + && !argv.empty() && (alias == argv[0] || endsWith(argv[0], "/" + std::string(alias)))) + return true; + /// Use app if the first arg 'app' is passed (the arg should be quietly removed) if (argv.size() >= 2) { auto first_arg = argv.begin() + 1; /// 'clickhouse --client ...' and 'clickhouse client ...' are Ok - if (*first_arg == "--" + app_suffix || *first_arg == app_suffix) + if (*first_arg == app_suffix + || (std::string_view(*first_arg).starts_with("--") && std::string_view(*first_arg).substr(2) == app_suffix)) { argv.erase(first_arg); return true; @@ -403,7 +417,7 @@ bool isClickhouseApp(const std::string & app_suffix, std::vector & argv) } /// Use app if clickhouse binary is run through symbolic link with name clickhouse-app - std::string app_name = "clickhouse-" + app_suffix; + std::string app_name = "clickhouse-" + std::string(app_suffix); return !argv.empty() && (app_name == argv[0] || endsWith(argv[0], "/" + app_name)); } diff --git a/programs/obfuscator/Obfuscator.cpp b/programs/obfuscator/Obfuscator.cpp index 15997ec986e..2cb5250cdf2 100644 --- a/programs/obfuscator/Obfuscator.cpp +++ b/programs/obfuscator/Obfuscator.cpp @@ -1106,7 +1106,7 @@ public: { if (isInteger(data_type)) { - if (isUnsignedInteger(data_type)) + if (isUInt(data_type)) return std::make_unique(seed); else return std::make_unique(seed); diff --git a/programs/self-extracting/CMakeLists.txt b/programs/self-extracting/CMakeLists.txt index f3ff0bbcd78..4b6dd07f618 100644 --- a/programs/self-extracting/CMakeLists.txt +++ b/programs/self-extracting/CMakeLists.txt @@ -11,8 +11,8 @@ else () endif () add_custom_target (self-extracting ALL - ${CMAKE_COMMAND} -E remove clickhouse + ${CMAKE_COMMAND} -E remove clickhouse clickhouse-stripped COMMAND ${COMPRESSOR} ${DECOMPRESSOR} clickhouse ../clickhouse - DEPENDS clickhouse compressor + COMMAND ${COMPRESSOR} ${DECOMPRESSOR} clickhouse-stripped ../clickhouse-stripped + DEPENDS clickhouse clickhouse-stripped compressor ) - diff --git a/programs/server/Server.cpp b/programs/server/Server.cpp index af460ccc7d9..8519532f788 100644 --- a/programs/server/Server.cpp +++ b/programs/server/Server.cpp @@ -98,7 +98,7 @@ #include 
#include "config.h" -#include "config_version.h" +#include #if defined(OS_LINUX) # include @@ -536,6 +536,16 @@ static void sanityChecks(Server & server) { } + try + { + const char * filename = "/proc/sys/kernel/task_delayacct"; + if (readNumber(filename) == 0) + server.context()->addWarningMessage("Delay accounting is not enabled, OSIOWaitMicroseconds will not be gathered. Check " + String(filename)); + } + catch (...) // NOLINT(bugprone-empty-catch) + { + } + std::string dev_id = getBlockDeviceId(data_path); if (getBlockDeviceType(dev_id) == BlockDeviceType::ROT && getBlockDeviceReadAheadBytes(dev_id) == 0) server.context()->addWarningMessage("Rotational disk with disabled readahead is in use. Performance can be degraded. Used for data: " + String(data_path)); @@ -666,6 +676,10 @@ try global_context->addWarningMessage("Server was built with sanitizer. It will work slowly."); #endif +#if defined(SANITIZE_COVERAGE) || WITH_COVERAGE + global_context->addWarningMessage("Server was built with code coverage. It will work slowly."); +#endif + const size_t physical_server_memory = getMemoryAmount(); LOG_INFO(log, "Available RAM: {}; physical cores: {}; logical cores: {}.", @@ -1149,6 +1163,8 @@ try CompiledExpressionCacheFactory::instance().init(compiled_expression_cache_max_size_in_bytes, compiled_expression_cache_max_elements); #endif + NamedCollectionUtils::loadIfNot(); + /// Initialize main config reloader. std::string include_from_path = config().getString("include_from", "/etc/metrika.xml"); @@ -1263,6 +1279,8 @@ try global_context->setHTTPHeaderFilter(*config); global_context->setMaxTableSizeToDrop(server_settings_.max_table_size_to_drop); + global_context->setClientHTTPHeaderForbiddenHeaders(server_settings_.get_client_http_header_forbidden_headers); + global_context->setAllowGetHTTPHeaderFunction(server_settings_.allow_get_client_http_header); global_context->setMaxPartitionSizeToDrop(server_settings_.max_partition_size_to_drop); ConcurrencyControl::SlotCount concurrent_threads_soft_limit = ConcurrencyControl::Unlimited; @@ -1362,6 +1380,8 @@ try global_context->reloadAuxiliaryZooKeepersConfigIfChanged(config); + global_context->reloadQueryMaskingRulesIfChanged(config); + std::lock_guard lock(servers_lock); updateServers(*config, server_pool, async_metrics, servers, servers_to_start_before_tables); } @@ -1557,6 +1577,10 @@ try global_context->setFormatSchemaPath(format_schema_path); fs::create_directories(format_schema_path); + /// Set the path for google proto files + if (config().has("google_protos_path")) + global_context->setGoogleProtosPath(fs::weakly_canonical(config().getString("google_protos_path"))); + /// Set path for filesystem caches fs::path filesystem_caches_path(config().getString("filesystem_caches_path", "")); if (!filesystem_caches_path.empty()) @@ -1681,7 +1705,7 @@ try /// Then, load remaining databases loadMetadata(global_context, default_database); convertDatabasesEnginesIfNeed(global_context); - database_catalog.startupBackgroundCleanup(); + database_catalog.startupBackgroundTasks(); /// After loading validate that default database exists database_catalog.assertDatabaseExists(default_database); /// Load user-defined SQL functions. @@ -1806,6 +1830,9 @@ try try { global_context->loadOrReloadDictionaries(config()); + + if (!config().getBool("dictionaries_lazy_load", true) && config().getBool("wait_dictionaries_load_at_startup", true)) + global_context->waitForDictionariesLoad(); } catch (...) { @@ -1951,7 +1978,8 @@ catch (...) 
{ /// Poco does not provide stacktrace. tryLogCurrentException("Application"); - throw; + auto code = getCurrentExceptionCode(); + return code ? code : -1; } std::unique_ptr Server::buildProtocolStackFromConfig( diff --git a/programs/server/config.d/path.xml b/programs/server/config.d/path.xml index 46af5bfb64b..7afada689d7 100644 --- a/programs/server/config.d/path.xml +++ b/programs/server/config.d/path.xml @@ -3,6 +3,7 @@ ./tmp/ ./user_files/ ./format_schemas/ + ../../contrib/google-protobuf/src/ ./access/ ./top_level_domains/ diff --git a/programs/server/config.xml b/programs/server/config.xml index a1e2907f6b6..f367b97cec1 100644 --- a/programs/server/config.xml +++ b/programs/server/config.xml @@ -104,15 +104,14 @@ - - + - 3 + 10 @@ -326,7 +325,7 @@ Query can upscale to desired number of threads during execution if more threads become available. --> 0 - 0 + 2 1000 @@ -406,6 +405,9 @@ --> 5368709120 + + 5368709120 @@ -1246,6 +1248,25 @@ 7500 + + + system + s3queue_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + blob_storage_log
+ toYYYYMM(event_date) + 7500 + event_date + INTERVAL 30 DAY +
+ *_dictionary.*ml + + true + + + true + *_function.*ml @@ -1396,6 +1428,10 @@ --> /var/lib/clickhouse/format_schemas/ + + /usr/share/clickhouse/protos/ + + - + 1 + + + 1 + + + diff --git a/programs/server/users.yaml.example b/programs/server/users.yaml.example index afae8f2b1ff..27bdf791e35 100644 --- a/programs/server/users.yaml.example +++ b/programs/server/users.yaml.example @@ -91,6 +91,10 @@ users: # User can create other users and grant rights to them. # access_management: 1 + # SQL expressions for grants available for that user - https://clickhouse.com/docs/en/sql-reference/statements/grant + # grants: + # - query: GRANT ALL ON *.* + # Quotas. quotas: # Name of quota. diff --git a/rust/skim/Cargo.toml b/rust/skim/Cargo.toml index 0381ad81619..22af40c3e33 100644 --- a/rust/skim/Cargo.toml +++ b/rust/skim/Cargo.toml @@ -23,3 +23,7 @@ debug = true inherits = "release" # We use LTO here as well to slightly decrease binary size lto = true + +[patch.crates-io] +# Ref: https://github.com/lotabout/tuikit/pull/51 +tuikit = { git = "https://github.com/azat-rust/tuikit.git", rev = "e1994c0e03ff02c49cf1471f0cc3cbf185ce0104" } diff --git a/src/Access/KerberosInit.cpp b/src/Access/KerberosInit.cpp index 58e4a46f2aa..772938ad9b2 100644 --- a/src/Access/KerberosInit.cpp +++ b/src/Access/KerberosInit.cpp @@ -44,7 +44,7 @@ private: krb5_ccache defcache = nullptr; krb5_get_init_creds_opt * options = nullptr; // Credentials structure including ticket, session key, and lifetime info. - krb5_creds my_creds; + krb5_creds my_creds {}; krb5_keytab keytab = nullptr; krb5_principal defcache_princ = nullptr; String fmtError(krb5_error_code code) const; diff --git a/src/Access/UsersConfigAccessStorage.cpp b/src/Access/UsersConfigAccessStorage.cpp index 952a1064829..2b0fb3f9b2e 100644 --- a/src/Access/UsersConfigAccessStorage.cpp +++ b/src/Access/UsersConfigAccessStorage.cpp @@ -12,7 +12,7 @@ #include #include #include -#include +#include #include #include #include diff --git a/src/AggregateFunctions/AggregateFunctionAnalysisOfVariance.cpp b/src/AggregateFunctions/AggregateFunctionAnalysisOfVariance.cpp index 9ef2d295828..934a8dffd90 100644 --- a/src/AggregateFunctions/AggregateFunctionAnalysisOfVariance.cpp +++ b/src/AggregateFunctions/AggregateFunctionAnalysisOfVariance.cpp @@ -1,7 +1,18 @@ #include -#include #include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + + namespace DB { @@ -13,6 +24,82 @@ namespace ErrorCodes namespace { +using AggregateFunctionAnalysisOfVarianceData = AnalysisOfVarianceMoments; + + +/// One way analysis of variance +/// Provides a statistical test of whether two or more population means are equal (null hypothesis) +/// Has an assumption that subjects from group i have normal distribution. +/// Accepts two arguments - a value and a group number which this value belongs to. +/// Groups are enumerated starting from 0 and there should be at least two groups to perform a test +/// Moreover there should be at least one group with the number of observations greater than one. 
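+///
+/// Illustrative SQL usage (the table and column names below are hypothetical):
+///     SELECT analysisOfVariance(value, group_id) FROM observations;
+/// The result is a tuple (f_statistic, p_value), as built in createResultType() below.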
+class AggregateFunctionAnalysisOfVariance final : public IAggregateFunctionDataHelper +{ +public: + explicit AggregateFunctionAnalysisOfVariance(const DataTypes & arguments, const Array & params) + : IAggregateFunctionDataHelper(arguments, params, createResultType()) + {} + + DataTypePtr createResultType() const + { + DataTypes types {std::make_shared>(), std::make_shared>() }; + Strings names {"f_statistic", "p_value"}; + return std::make_shared( + std::move(types), + std::move(names) + ); + } + + String getName() const override { return "analysisOfVariance"; } + + bool allocatesMemoryInArena() const override { return false; } + + void add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena *) const override + { + data(place).add(columns[0]->getFloat64(row_num), columns[1]->getUInt(row_num)); + } + + void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override + { + data(place).merge(data(rhs)); + } + + void serialize(ConstAggregateDataPtr __restrict place, WriteBuffer & buf, std::optional /* version */) const override + { + data(place).write(buf); + } + + void deserialize(AggregateDataPtr __restrict place, ReadBuffer & buf, std::optional /* version */, Arena *) const override + { + data(place).read(buf); + } + + void insertResultInto(AggregateDataPtr __restrict place, IColumn & to, Arena *) const override + { + auto f_stat = data(place).getFStatistic(); + + auto & column_tuple = assert_cast(to); + auto & column_stat = assert_cast &>(column_tuple.getColumn(0)); + auto & column_value = assert_cast &>(column_tuple.getColumn(1)); + + if (unlikely(!std::isfinite(f_stat) || f_stat < 0)) + { + column_stat.getData().push_back(std::numeric_limits::quiet_NaN()); + column_value.getData().push_back(std::numeric_limits::quiet_NaN()); + return; + } + + auto p_value = data(place).getPValue(f_stat); + + /// Because p-value is a probability. + p_value = std::min(1.0, std::max(0.0, p_value)); + + column_stat.getData().push_back(f_stat); + column_value.getData().push_back(p_value); + } + +}; + AggregateFunctionPtr createAggregateFunctionAnalysisOfVariance(const std::string & name, const DataTypes & arguments, const Array & parameters, const Settings *) { assertNoParameters(name, parameters); diff --git a/src/AggregateFunctions/AggregateFunctionAnalysisOfVariance.h b/src/AggregateFunctions/AggregateFunctionAnalysisOfVariance.h deleted file mode 100644 index 76e749dc1fe..00000000000 --- a/src/AggregateFunctions/AggregateFunctionAnalysisOfVariance.h +++ /dev/null @@ -1,97 +0,0 @@ -#pragma once - -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include "Common/NaNUtils.h" -#include -#include - - -namespace DB -{ - -using AggregateFunctionAnalysisOfVarianceData = AnalysisOfVarianceMoments; - - -/// One way analysis of variance -/// Provides a statistical test of whether two or more population means are equal (null hypothesis) -/// Has an assumption that subjects from group i have normal distribution. -/// Accepts two arguments - a value and a group number which this value belongs to. -/// Groups are enumerated starting from 0 and there should be at least two groups to perform a test -/// Moreover there should be at least one group with the number of observations greater than one. 
-class AggregateFunctionAnalysisOfVariance final : public IAggregateFunctionDataHelper -{ -public: - explicit AggregateFunctionAnalysisOfVariance(const DataTypes & arguments, const Array & params) - : IAggregateFunctionDataHelper(arguments, params, createResultType()) - {} - - DataTypePtr createResultType() const - { - DataTypes types {std::make_shared>(), std::make_shared>() }; - Strings names {"f_statistic", "p_value"}; - return std::make_shared( - std::move(types), - std::move(names) - ); - } - - String getName() const override { return "analysisOfVariance"; } - - bool allocatesMemoryInArena() const override { return false; } - - void add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena *) const override - { - data(place).add(columns[0]->getFloat64(row_num), columns[1]->getUInt(row_num)); - } - - void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override - { - data(place).merge(data(rhs)); - } - - void serialize(ConstAggregateDataPtr __restrict place, WriteBuffer & buf, std::optional /* version */) const override - { - data(place).write(buf); - } - - void deserialize(AggregateDataPtr __restrict place, ReadBuffer & buf, std::optional /* version */, Arena *) const override - { - data(place).read(buf); - } - - void insertResultInto(AggregateDataPtr __restrict place, IColumn & to, Arena *) const override - { - auto f_stat = data(place).getFStatistic(); - - auto & column_tuple = assert_cast(to); - auto & column_stat = assert_cast &>(column_tuple.getColumn(0)); - auto & column_value = assert_cast &>(column_tuple.getColumn(1)); - - if (unlikely(!std::isfinite(f_stat) || f_stat < 0)) - { - column_stat.getData().push_back(std::numeric_limits::quiet_NaN()); - column_value.getData().push_back(std::numeric_limits::quiet_NaN()); - return; - } - - auto p_value = data(place).getPValue(f_stat); - - /// Because p-value is a probability. 
- p_value = std::min(1.0, std::max(0.0, p_value)); - - column_stat.getData().push_back(f_stat); - column_value.getData().push_back(p_value); - } - -}; - -} diff --git a/src/AggregateFunctions/AggregateFunctionAvgWeighted.cpp b/src/AggregateFunctions/AggregateFunctionAvgWeighted.cpp index e840005facf..d33e843fac9 100644 --- a/src/AggregateFunctions/AggregateFunctionAvgWeighted.cpp +++ b/src/AggregateFunctions/AggregateFunctionAvgWeighted.cpp @@ -1,12 +1,14 @@ #include #include +#include #include -#include #include #include + namespace DB { + struct Settings; namespace ErrorCodes @@ -16,13 +18,93 @@ namespace ErrorCodes namespace { + +template +using AvgWeightedFieldType = std::conditional_t, + Float64, // no way to do UInt128 * UInt128, better cast to Float64 + NearestFieldType>; + +template +using MaxFieldType = std::conditional_t<(sizeof(AvgWeightedFieldType) > sizeof(AvgWeightedFieldType)), + AvgWeightedFieldType, AvgWeightedFieldType>; + +template +class AggregateFunctionAvgWeighted final : + public AggregateFunctionAvgBase< + MaxFieldType, AvgWeightedFieldType, AggregateFunctionAvgWeighted> +{ +public: + using Base = AggregateFunctionAvgBase< + MaxFieldType, AvgWeightedFieldType, AggregateFunctionAvgWeighted>; + using Base::Base; + + using Numerator = typename Base::Numerator; + using Denominator = typename Base::Denominator; + using Fraction = typename Base::Fraction; + + void NO_SANITIZE_UNDEFINED add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena *) const override + { + const auto & weights = static_cast &>(*columns[1]); + + this->data(place).numerator += static_cast( + static_cast &>(*columns[0]).getData()[row_num]) + * static_cast(weights.getData()[row_num]); + + this->data(place).denominator += static_cast(weights.getData()[row_num]); + } + + String getName() const override { return "avgWeighted"; } + +#if USE_EMBEDDED_COMPILER + + bool isCompilable() const override + { + bool can_be_compiled = Base::isCompilable(); + can_be_compiled &= canBeNativeType(); + + return can_be_compiled; + } + + void compileAdd(llvm::IRBuilderBase & builder, llvm::Value * aggregate_data_ptr, const ValuesWithType & arguments) const override + { + llvm::IRBuilder<> & b = static_cast &>(builder); + + auto * numerator_type = toNativeType(b); + auto * numerator_ptr = aggregate_data_ptr; + auto * numerator_value = b.CreateLoad(numerator_type, numerator_ptr); + + auto numerator_data_type = toNativeDataType(); + auto * argument = nativeCast(b, arguments[0], numerator_data_type); + auto * weight = nativeCast(b, arguments[1], numerator_data_type); + + llvm::Value * value_weight_multiplication = argument->getType()->isIntegerTy() ? b.CreateMul(argument, weight) : b.CreateFMul(argument, weight); + auto * numerator_result_value = numerator_type->isIntegerTy() ? b.CreateAdd(numerator_value, value_weight_multiplication) : b.CreateFAdd(numerator_value, value_weight_multiplication); + b.CreateStore(numerator_result_value, numerator_ptr); + + auto * denominator_type = toNativeType(b); + + static constexpr size_t denominator_offset = offsetof(Fraction, denominator); + auto * denominator_ptr = b.CreateConstInBoundsGEP1_64(b.getInt8Ty(), aggregate_data_ptr, denominator_offset); + + auto * weight_cast_to_denominator = nativeCast(b, arguments[1], toNativeDataType()); + + auto * denominator_value = b.CreateLoad(denominator_type, denominator_ptr); + auto * denominator_value_updated = denominator_type->isIntegerTy() ? 
b.CreateAdd(denominator_value, weight_cast_to_denominator) : b.CreateFAdd(denominator_value, weight_cast_to_denominator); + + b.CreateStore(denominator_value_updated, denominator_ptr); + } + +#endif + +}; + bool allowTypes(const DataTypePtr& left, const DataTypePtr& right) noexcept { const WhichDataType l_dt(left), r_dt(right); constexpr auto allow = [](WhichDataType t) { - return t.isInt() || t.isUInt() || t.isFloat() || t.isDecimal(); + return t.isInt() || t.isUInt() || t.isFloat(); }; return allow(l_dt) && allow(r_dt); @@ -33,7 +115,6 @@ bool allowTypes(const DataTypePtr& left, const DataTypePtr& right) noexcept { \ LINE(Int8); LINE(Int16); LINE(Int32); LINE(Int64); LINE(Int128); LINE(Int256); \ LINE(UInt8); LINE(UInt16); LINE(UInt32); LINE(UInt64); LINE(UInt128); LINE(UInt256); \ - LINE(Decimal32); LINE(Decimal64); LINE(Decimal128); LINE(Decimal256); \ LINE(Float32); LINE(Float64); \ default: return nullptr; \ } @@ -75,31 +156,14 @@ createAggregateFunctionAvgWeighted(const std::string & name, const DataTypes & a "Types {} and {} are non-conforming as arguments for aggregate function {}", data_type->getName(), data_type_weight->getName(), name); - AggregateFunctionPtr ptr; - - const bool left_decimal = isDecimal(data_type); - const bool right_decimal = isDecimal(data_type_weight); - - /// We multiply value by weight, so actual scale of numerator is + - if (left_decimal && right_decimal) - ptr.reset(create(*data_type, *data_type_weight, - argument_types, - getDecimalScale(*data_type) + getDecimalScale(*data_type_weight), getDecimalScale(*data_type_weight))); - else if (left_decimal) - ptr.reset(create(*data_type, *data_type_weight, argument_types, - getDecimalScale(*data_type))); - else if (right_decimal) - ptr.reset(create(*data_type, *data_type_weight, argument_types, - getDecimalScale(*data_type_weight), getDecimalScale(*data_type_weight))); - else - ptr.reset(create(*data_type, *data_type_weight, argument_types)); - - return ptr; + return AggregateFunctionPtr(create(*data_type, *data_type_weight, argument_types)); } + } void registerAggregateFunctionAvgWeighted(AggregateFunctionFactory & factory) { - factory.registerFunction("avgWeighted", createAggregateFunctionAvgWeighted, AggregateFunctionFactory::CaseSensitive); + factory.registerFunction("avgWeighted", createAggregateFunctionAvgWeighted); } + } diff --git a/src/AggregateFunctions/AggregateFunctionAvgWeighted.h b/src/AggregateFunctions/AggregateFunctionAvgWeighted.h deleted file mode 100644 index 5a3869032ca..00000000000 --- a/src/AggregateFunctions/AggregateFunctionAvgWeighted.h +++ /dev/null @@ -1,90 +0,0 @@ -#pragma once - -#include -#include - -namespace DB -{ -struct Settings; - -template -using AvgWeightedFieldType = std::conditional_t, - std::conditional_t, Decimal256, Decimal128>, - std::conditional_t, - Float64, // no way to do UInt128 * UInt128, better cast to Float64 - NearestFieldType>>; - -template -using MaxFieldType = std::conditional_t<(sizeof(AvgWeightedFieldType) > sizeof(AvgWeightedFieldType)), - AvgWeightedFieldType, AvgWeightedFieldType>; - -template -class AggregateFunctionAvgWeighted final : - public AggregateFunctionAvgBase< - MaxFieldType, AvgWeightedFieldType, AggregateFunctionAvgWeighted> -{ -public: - using Base = AggregateFunctionAvgBase< - MaxFieldType, AvgWeightedFieldType, AggregateFunctionAvgWeighted>; - using Base::Base; - - using Numerator = typename Base::Numerator; - using Denominator = typename Base::Denominator; - using Fraction = typename Base::Fraction; - - void NO_SANITIZE_UNDEFINED 
add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena *) const override - { - const auto& weights = static_cast &>(*columns[1]); - - this->data(place).numerator += static_cast( - static_cast &>(*columns[0]).getData()[row_num]) * - static_cast(weights.getData()[row_num]); - - this->data(place).denominator += static_cast(weights.getData()[row_num]); - } - - String getName() const override { return "avgWeighted"; } - -#if USE_EMBEDDED_COMPILER - - bool isCompilable() const override - { - bool can_be_compiled = Base::isCompilable(); - can_be_compiled &= canBeNativeType(); - - return can_be_compiled; - } - - void compileAdd(llvm::IRBuilderBase & builder, llvm::Value * aggregate_data_ptr, const ValuesWithType & arguments) const override - { - llvm::IRBuilder<> & b = static_cast &>(builder); - - auto * numerator_type = toNativeType(b); - auto * numerator_ptr = aggregate_data_ptr; - auto * numerator_value = b.CreateLoad(numerator_type, numerator_ptr); - - auto numerator_data_type = toNativeDataType(); - auto * argument = nativeCast(b, arguments[0], numerator_data_type); - auto * weight = nativeCast(b, arguments[1], numerator_data_type); - - llvm::Value * value_weight_multiplication = argument->getType()->isIntegerTy() ? b.CreateMul(argument, weight) : b.CreateFMul(argument, weight); - auto * numerator_result_value = numerator_type->isIntegerTy() ? b.CreateAdd(numerator_value, value_weight_multiplication) : b.CreateFAdd(numerator_value, value_weight_multiplication); - b.CreateStore(numerator_result_value, numerator_ptr); - - auto * denominator_type = toNativeType(b); - - static constexpr size_t denominator_offset = offsetof(Fraction, denominator); - auto * denominator_ptr = b.CreateConstInBoundsGEP1_64(b.getInt8Ty(), aggregate_data_ptr, denominator_offset); - - auto * weight_cast_to_denominator = nativeCast(b, arguments[1], toNativeDataType()); - - auto * denominator_value = b.CreateLoad(denominator_type, denominator_ptr); - auto * denominator_value_updated = denominator_type->isIntegerTy() ? b.CreateAdd(denominator_value, weight_cast_to_denominator) : b.CreateFAdd(denominator_value, weight_cast_to_denominator); - - b.CreateStore(denominator_value_updated, denominator_ptr); - } - -#endif - -}; -} diff --git a/src/AggregateFunctions/AggregateFunctionBitwise.cpp b/src/AggregateFunctions/AggregateFunctionBitwise.cpp index f5c2deb4588..619251552e4 100644 --- a/src/AggregateFunctions/AggregateFunctionBitwise.cpp +++ b/src/AggregateFunctions/AggregateFunctionBitwise.cpp @@ -1,11 +1,27 @@ #include -#include #include #include +#include +#include + +#include +#include +#include + +#include + +#include "config.h" + +#if USE_EMBEDDED_COMPILER +# include +# include +#endif + namespace DB { + struct Settings; namespace ErrorCodes @@ -16,6 +32,179 @@ namespace ErrorCodes namespace { +template +struct AggregateFunctionGroupBitOrData +{ + T value = 0; + static const char * name() { return "groupBitOr"; } + void update(T x) { value |= x; } + +#if USE_EMBEDDED_COMPILER + + static void compileCreate(llvm::IRBuilderBase & builder, llvm::Value * value_ptr) + { + auto type = toNativeType(builder); + builder.CreateStore(llvm::Constant::getNullValue(type), value_ptr); + } + + static llvm::Value* compileUpdate(llvm::IRBuilderBase & builder, llvm::Value * lhs, llvm::Value * rhs) + { + return builder.CreateOr(lhs, rhs); + } + +#endif +}; + +template +struct AggregateFunctionGroupBitAndData +{ + T value = -1; /// Two's complement arithmetic, sign extension. 
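+    /// In two's complement, -1 has every bit set, so it is the identity element for bitwise AND:
+    /// the first update(x) leaves exactly x. compileCreate below stores
+    /// llvm::ConstantInt::get(type, -1) to produce the same initial state in JIT-compiled code.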
+ static const char * name() { return "groupBitAnd"; } + void update(T x) { value &= x; } + +#if USE_EMBEDDED_COMPILER + + static void compileCreate(llvm::IRBuilderBase & builder, llvm::Value * value_ptr) + { + auto type = toNativeType(builder); + builder.CreateStore(llvm::ConstantInt::get(type, -1), value_ptr); + } + + static llvm::Value* compileUpdate(llvm::IRBuilderBase & builder, llvm::Value * lhs, llvm::Value * rhs) + { + return builder.CreateAnd(lhs, rhs); + } + +#endif +}; + +template +struct AggregateFunctionGroupBitXorData +{ + T value = 0; + static const char * name() { return "groupBitXor"; } + void update(T x) { value ^= x; } + +#if USE_EMBEDDED_COMPILER + + static void compileCreate(llvm::IRBuilderBase & builder, llvm::Value * value_ptr) + { + auto type = toNativeType(builder); + builder.CreateStore(llvm::Constant::getNullValue(type), value_ptr); + } + + static llvm::Value* compileUpdate(llvm::IRBuilderBase & builder, llvm::Value * lhs, llvm::Value * rhs) + { + return builder.CreateXor(lhs, rhs); + } + +#endif +}; + + +/// Counts bitwise operation on numbers. +template +class AggregateFunctionBitwise final : public IAggregateFunctionDataHelper> +{ +public: + explicit AggregateFunctionBitwise(const DataTypePtr & type) + : IAggregateFunctionDataHelper>({type}, {}, createResultType()) + {} + + String getName() const override { return Data::name(); } + + static DataTypePtr createResultType() + { + return std::make_shared>(); + } + + bool allocatesMemoryInArena() const override { return false; } + + void add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena *) const override + { + this->data(place).update(assert_cast &>(*columns[0]).getData()[row_num]); + } + + void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override + { + this->data(place).update(this->data(rhs).value); + } + + void serialize(ConstAggregateDataPtr __restrict place, WriteBuffer & buf, std::optional /* version */) const override + { + writeBinary(this->data(place).value, buf); + } + + void deserialize(AggregateDataPtr __restrict place, ReadBuffer & buf, std::optional /* version */, Arena *) const override + { + readBinary(this->data(place).value, buf); + } + + void insertResultInto(AggregateDataPtr __restrict place, IColumn & to, Arena *) const override + { + assert_cast &>(to).getData().push_back(this->data(place).value); + } + +#if USE_EMBEDDED_COMPILER + + bool isCompilable() const override + { + auto return_type = this->getResultType(); + return canBeNativeType(*return_type); + } + + void compileCreate(llvm::IRBuilderBase & builder, llvm::Value * aggregate_data_ptr) const override + { + auto * value_ptr = aggregate_data_ptr; + Data::compileCreate(builder, value_ptr); + } + + void compileAdd(llvm::IRBuilderBase & builder, llvm::Value * aggregate_data_ptr, const ValuesWithType & arguments) const override + { + llvm::IRBuilder<> & b = static_cast &>(builder); + + auto * return_type = toNativeType(b, this->getResultType()); + + auto * value_ptr = aggregate_data_ptr; + auto * value = b.CreateLoad(return_type, value_ptr); + + auto * result_value = Data::compileUpdate(builder, value, arguments[0].value); + + b.CreateStore(result_value, value_ptr); + } + + void compileMerge(llvm::IRBuilderBase & builder, llvm::Value * aggregate_data_dst_ptr, llvm::Value * aggregate_data_src_ptr) const override + { + llvm::IRBuilder<> & b = static_cast &>(builder); + + auto * return_type = toNativeType(b, this->getResultType()); + + auto * value_dst_ptr = 
aggregate_data_dst_ptr; + auto * value_dst = b.CreateLoad(return_type, value_dst_ptr); + + auto * value_src_ptr = aggregate_data_src_ptr; + auto * value_src = b.CreateLoad(return_type, value_src_ptr); + + auto * result_value = Data::compileUpdate(builder, value_dst, value_src); + + b.CreateStore(result_value, value_dst_ptr); + } + + llvm::Value * compileGetResult(llvm::IRBuilderBase & builder, llvm::Value * aggregate_data_ptr) const override + { + llvm::IRBuilder<> & b = static_cast<llvm::IRBuilder<> &>(builder); + + auto * return_type = toNativeType(b, this->getResultType()); + auto * value_ptr = aggregate_data_ptr; + + return b.CreateLoad(return_type, value_ptr); + } + +#endif + +}; + + template