From 627d15c22d15feb220abf46b8479958261355ea6 Mon Sep 17 00:00:00 2001 From: Max Kainov Date: Thu, 14 Nov 2024 23:05:42 +0000 Subject: [PATCH 01/31] CI: packaging --- .github/workflows/pr.yaml | 661 ++++++++++++++++++++++++++ ci/__init__.py | 0 ci/docker/fasttest/Dockerfile | 9 + ci/jobs/build_clickhouse.py | 48 ++ ci/jobs/scripts/clickhouse_version.py | 36 ++ ci/praktika/_environment.py | 2 +- ci/praktika/hook_html.py | 4 +- ci/praktika/result.py | 3 + ci/praktika/runner.py | 31 +- ci/praktika/s3.py | 10 +- ci/workflows/pull_request.py | 35 +- packages/build | 50 +- 12 files changed, 841 insertions(+), 48 deletions(-) create mode 100644 .github/workflows/pr.yaml delete mode 100644 ci/__init__.py create mode 100644 ci/jobs/scripts/clickhouse_version.py diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml new file mode 100644 index 00000000000..0e24cc379be --- /dev/null +++ b/.github/workflows/pr.yaml @@ -0,0 +1,661 @@ +# generated by praktika + +name: PR + +on: + pull_request: + branches: ['master'] + +# Cancel the previous wf run in PRs. +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} + +# Allow updating GH commit statuses and PR comments to post an actual job reports link +permissions: write-all + +jobs: + + config_workflow: + runs-on: [ci_services] + needs: [] + name: "Config Workflow" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + docker_builds: + runs-on: [ci_services_ebs] + needs: [config_workflow] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIEJ1aWxkcw==') }} + name: "Docker Builds" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . 
/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + style_check: + runs-on: [ci_services] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3R5bGUgQ2hlY2s=') }} + name: "Style Check" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + fast_test: + runs-on: [builder] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RmFzdCB0ZXN0') }} + name: "Fast test" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + build_amd_debug: + runs-on: [builder] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }} + name: "Build (amd_debug)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. 
+ + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Build (amd_debug)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Build (amd_debug)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + build_amd_release: + runs-on: [builder] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }} + name: "Build (amd_release)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Build (amd_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Build (amd_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + build_arm_release: + runs-on: [builder-aarch64] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }} + name: "Build (arm_release)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . 
/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Build (arm_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Build (arm_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + build_arm_asan: + runs-on: [builder-aarch64] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9hc2FuKQ==') }} + name: "Build (arm_asan)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Build (arm_asan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Build (arm_asan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateless_tests_amd_debugparallel: + runs-on: [builder] + needs: [config_workflow, docker_builds, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcscGFyYWxsZWwp') }} + name: "Stateless tests (amd_debug,parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . 
/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateless_tests_amd_debugnon_parallel: + runs-on: [func-tester] + needs: [config_workflow, docker_builds, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsbm9uLXBhcmFsbGVsKQ==') }} + name: "Stateless tests (amd_debug,non-parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateless_tests_amd_releaseparallel: + runs-on: [builder] + needs: [config_workflow, docker_builds, build_amd_release] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxwYXJhbGxlbCk=') }} + name: "Stateless tests (amd_release,parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . 
/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateless_tests_amd_releasenon_parallel: + runs-on: [func-tester] + needs: [config_workflow, docker_builds, build_amd_release] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxub24tcGFyYWxsZWwp') }} + name: "Stateless tests (amd_release,non-parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateless_tests_arm_asanparallel: + runs-on: [builder-aarch64] + needs: [config_workflow, docker_builds, build_arm_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixwYXJhbGxlbCk=') }} + name: "Stateless tests (arm_asan,parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . 
/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateless_tests_arm_asannon_parallel: + runs-on: [func-tester-aarch64] + needs: [config_workflow, docker_builds, build_arm_asan] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixub24tcGFyYWxsZWwp') }} + name: "Stateless tests (arm_asan,non-parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateful_tests_amd_debugparallel: + runs-on: [builder] + needs: [config_workflow, docker_builds, build_amd_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVmdWwgdGVzdHMgKGFtZF9kZWJ1ZyxwYXJhbGxlbCk=') }} + name: "Stateful tests (amd_debug,parallel)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . 
/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateful tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateful tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + finish_workflow: + runs-on: [ci_services] + needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, build_arm_release, build_arm_asan, stateless_tests_amd_debugparallel, stateless_tests_amd_debugnon_parallel, stateless_tests_amd_releaseparallel, stateless_tests_amd_releasenon_parallel, stateless_tests_arm_asanparallel, stateless_tests_arm_asannon_parallel, stateful_tests_amd_debugparallel] + if: ${{ !cancelled() }} + name: "Finish Workflow" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi diff --git a/ci/__init__.py b/ci/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/ci/docker/fasttest/Dockerfile b/ci/docker/fasttest/Dockerfile index 66e48b163b8..b540bfc11f6 100644 --- a/ci/docker/fasttest/Dockerfile +++ b/ci/docker/fasttest/Dockerfile @@ -105,5 +105,14 @@ RUN groupadd --system --gid 1000 clickhouse \ && useradd --system --gid 1000 --uid 1000 -m clickhouse \ && mkdir -p /.cache/sccache && chmod 777 /.cache/sccache + +# TODO move nfpm to docker that will do packaging +ARG TARGETARCH +ARG NFPM_VERSION=2.20.0 +RUN arch=${TARGETARCH:-amd64} \ + && curl -Lo /tmp/nfpm.deb "https://github.com/goreleaser/nfpm/releases/download/v${NFPM_VERSION}/nfpm_${arch}.deb" \ + && dpkg -i /tmp/nfpm.deb \ + && rm /tmp/nfpm.deb + ENV PYTHONPATH="/wd" ENV PYTHONUNBUFFERED=1 diff --git a/ci/jobs/build_clickhouse.py b/ci/jobs/build_clickhouse.py index ed9fd491fcf..06d8bc1c179 100644 --- a/ci/jobs/build_clickhouse.py +++ b/ci/jobs/build_clickhouse.py @@ -4,11 +4,15 @@ from praktika.result import Result from praktika.settings import Settings from praktika.utils import MetaClasses, Shell, Utils +from ci.jobs.scripts.clickhouse_version import CHVersion + class JobStages(metaclass=MetaClasses.WithIter): CHECKOUT_SUBMODULES = "checkout" CMAKE = "cmake" + UNSHALLOW = "unshallow" BUILD = "build" + PACKAGE = "package" def parse_args(): @@ -92,6 +96,24 @@ def main(): res = True results = [] + if res and JobStages.UNSHALLOW in stages: + results.append( + Result.create_from_command_execution( + name="Repo Unshallow", + command="git rev-parse --is-shallow-repository | grep -q true && git fetch --filter=tree:0 --depth=5000 origin 
$(git rev-parse --abbrev-ref HEAD)", + with_log=True, + ) + ) + if results[-1].is_ok(): + try: + version = CHVersion.get_version() + print(f"Got version from repo [{version}]") + except Exception as e: + results[-1].set_failed().set_info( + f"Failed to retrieve version from repo: ex [{e}]" + ) + res = results[-1].is_ok() + if res and JobStages.CHECKOUT_SUBMODULES in stages: Shell.check(f"rm -rf {build_dir} && mkdir -p {build_dir}") results.append( @@ -127,6 +149,32 @@ def main(): Shell.check(f"ls -l {build_dir}/programs/") res = results[-1].is_ok() + if res and JobStages.PACKAGE in stages: + if "debug" in build_type: + package_type = "debug" + elif "release" in build_type: + package_type = "release" + elif "asan" in build_type: + package_type = "asan" + else: + assert False, "TODO" + + output_dir = "/tmp/praktika/output/" + assert Shell.check(f"rm -f {output_dir}/*.deb") + + results.append( + Result.create_from_command_execution( + name="Build Packages", + command=[ + f"DESTDIR={build_dir}/root ninja programs/install", + f"ln -sf {build_dir}/root {Utils.cwd()}/packages/root && cd {Utils.cwd()}/packages/ && OUTPUT_DIR={output_dir} BUILD_TYPE={package_type} VERSION_STRING={version} ./build --deb", + ], + workdir=build_dir, + with_log=True, + ) + ) + res = results[-1].is_ok() + Result.create_from(results=results, stopwatch=stop_watch).complete_job() diff --git a/ci/jobs/scripts/clickhouse_version.py b/ci/jobs/scripts/clickhouse_version.py new file mode 100644 index 00000000000..0f60a89e92f --- /dev/null +++ b/ci/jobs/scripts/clickhouse_version.py @@ -0,0 +1,36 @@ +from pathlib import Path + +from praktika.utils import Shell + + +class CHVersion: + FILE_WITH_VERSION_PATH = "./cmake/autogenerated_versions.txt" + + @classmethod + def _get_tweak(cls): + tag = Shell.get_output("git describe --tags --abbrev=0") + assert tag.startswith("v24") + num = Shell.get_output(f"git rev-list --count {tag}..HEAD") + return int(num) + + @classmethod + def get_version(cls): + versions = {} + for line in ( + Path(cls.FILE_WITH_VERSION_PATH).read_text(encoding="utf-8").splitlines() + ): + line = line.strip() + if not line.startswith("SET("): + continue + + name, value = line[4:-1].split(maxsplit=1) + name = name.removeprefix("VERSION_").lower() + try: + value = int(value) + except ValueError: + pass + versions[name] = value + + version_sha = versions["githash"] + tweak = int(Shell.get_output(f"git rev-list --count {version_sha}..HEAD", verbose=True)) + return f"{versions['major']}.{versions['minor']}.{versions['patch']}.{tweak}" diff --git a/ci/praktika/_environment.py b/ci/praktika/_environment.py index 734a4be3176..cef305d1b8b 100644 --- a/ci/praktika/_environment.py +++ b/ci/praktika/_environment.py @@ -179,7 +179,7 @@ class _Environment(MetaClasses.Serializable): if bucket in path: path = path.replace(bucket, endpoint) break - REPORT_URL = f"https://{path}/{Path(settings.HTML_PAGE_FILE).name}?PR={self.PR_NUMBER}&sha={'latest' if latest else self.SHA}&name_0={urllib.parse.quote(self.WORKFLOW_NAME, safe='')}&name_1={urllib.parse.quote(self.JOB_NAME, safe='')}" + REPORT_URL = f"https://{path}/{Path(settings.HTML_PAGE_FILE).name}?PR={self.PR_NUMBER}&sha={'latest' if latest else self.SHA}&name_0={urllib.parse.quote(self.WORKFLOW_NAME, safe='')}" return REPORT_URL def is_local_run(self): diff --git a/ci/praktika/hook_html.py b/ci/praktika/hook_html.py index e2faefb2fa9..ed2335a640a 100644 --- a/ci/praktika/hook_html.py +++ b/ci/praktika/hook_html.py @@ -137,14 +137,14 @@ class HtmlRunnerHooks: 
summary_result.start_time = Utils.timestamp() assert _ResultS3.copy_result_to_s3_with_version(summary_result, version=0) - page_url = env.get_report_url(settings=Settings) + page_url = env.get_report_url(settings=Settings, latest=True) print(f"CI Status page url [{page_url}]") res1 = GH.post_commit_status( name=_workflow.name, status=Result.Status.PENDING, description="", - url=env.get_report_url(settings=Settings, latest=True), + url=page_url, ) res2 = GH.post_pr_comment( comment_body=f"Workflow [[{_workflow.name}]({page_url})], commit [{_Environment.get().SHA[:8]}]", diff --git a/ci/praktika/result.py b/ci/praktika/result.py index 8164b1d1295..082807fc9f0 100644 --- a/ci/praktika/result.py +++ b/ci/praktika/result.py @@ -121,6 +121,9 @@ class Result(MetaClasses.Serializable): def set_success(self) -> "Result": return self.set_status(Result.Status.SUCCESS) + def set_failed(self) -> "Result": + return self.set_status(Result.Status.FAILED) + def set_results(self, results: List["Result"]) -> "Result": self.results = results self.dump() diff --git a/ci/praktika/runner.py b/ci/praktika/runner.py index 38112dd5684..6dc8debeed1 100644 --- a/ci/praktika/runner.py +++ b/ci/praktika/runner.py @@ -1,3 +1,5 @@ +import glob +import json import os import re import sys @@ -58,6 +60,9 @@ class Runner: workflow_config.digest_dockers[docker.name] = Digest().calc_docker_digest( docker, workflow.dockers ) + + # work around for old clickhouse jobs + os.environ["DOCKER_TAG"] = json.dumps(workflow_config.digest_dockers) workflow_config.dump() Result.generate_pending(job.name).dump() @@ -119,8 +124,21 @@ class Runner: else: prefixes = [env.get_s3_prefix()] * len(required_artifacts) for artifact, prefix in zip(required_artifacts, prefixes): - s3_path = f"{Settings.S3_ARTIFACT_PATH}/{prefix}/{Utils.normalize_string(artifact._provided_by)}/{Path(artifact.path).name}" - assert S3.copy_file_from_s3(s3_path=s3_path, local_path=Settings.INPUT_DIR) + recursive = False + include_pattern = "" + if "*" in artifact.path: + s3_path = f"{Settings.S3_ARTIFACT_PATH}/{prefix}/{Utils.normalize_string(artifact._provided_by)}/" + recursive = True + include_pattern = Path(artifact.path).name + assert "*" in include_pattern + else: + s3_path = f"{Settings.S3_ARTIFACT_PATH}/{prefix}/{Utils.normalize_string(artifact._provided_by)}/{Path(artifact.path).name}" + assert S3.copy_file_from_s3( + s3_path=s3_path, + local_path=Settings.INPUT_DIR, + recursive=recursive, + include_pattern=include_pattern, + ) return 0 @@ -262,10 +280,11 @@ class Runner: f"ls -l {artifact.path}", verbose=True ), f"Artifact {artifact.path} not found" s3_path = f"{Settings.S3_ARTIFACT_PATH}/{env.get_s3_prefix()}/{Utils.normalize_string(env.JOB_NAME)}" - link = S3.copy_file_to_s3( - s3_path=s3_path, local_path=artifact.path - ) - result.set_link(link) + for file_path in glob.glob(artifact.path): + link = S3.copy_file_to_s3( + s3_path=s3_path, local_path=file_path + ) + result.set_link(link) except Exception as e: error = ( f"ERROR: Failed to upload artifact [{artifact}], ex [{e}]" diff --git a/ci/praktika/s3.py b/ci/praktika/s3.py index 82034b57b80..6e8a0a6e8fe 100644 --- a/ci/praktika/s3.py +++ b/ci/praktika/s3.py @@ -117,15 +117,21 @@ class S3: return res @classmethod - def copy_file_from_s3(cls, s3_path, local_path): + def copy_file_from_s3( + cls, s3_path, local_path, recursive=False, include_pattern="" + ): assert Path(s3_path), f"Invalid S3 Path [{s3_path}]" if Path(local_path).is_dir(): - local_path = Path(local_path) / Path(s3_path).name + pass else: assert 
Path( local_path ).parent.is_dir(), f"Parent path for [{local_path}] does not exist" cmd = f"aws s3 cp s3://{s3_path} {local_path}" + if recursive: + cmd += " --recursive" + if include_pattern: + cmd += f" --include {include_pattern}" res = cls.run_command_with_retries(cmd) return res diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py index 761ab8a6ebc..63b16ead507 100644 --- a/ci/workflows/pull_request.py +++ b/ci/workflows/pull_request.py @@ -16,6 +16,11 @@ class ArtifactNames: CH_ARM_RELEASE = "CH_ARM_RELEASE" CH_ARM_ASAN = "CH_ARM_ASAN" + DEB_AMD_DEBUG = "DEB_AMD_DEBUG" + DEB_AMD_RELEASE = "DEB_AMD_RELEASE" + DEB_ARM_RELEASE = "DEB_ARM_RELEASE" + DEB_ARM_ASAN = "DEB_ARM_ASAN" + style_check_job = Job.Config( name=JobNames.STYLE_CHECK, @@ -41,7 +46,7 @@ fast_test_job = Job.Config( build_jobs = Job.Config( name=JobNames.BUILD, runs_on=["...from params..."], - requires=[JobNames.FAST_TEST], + requires=[], command="python3 ./ci/jobs/build_clickhouse.py --build-type {PARAMETER}", run_in_docker="clickhouse/fasttest", timeout=3600 * 2, @@ -63,10 +68,10 @@ build_jobs = Job.Config( ).parametrize( parameter=["amd_debug", "amd_release", "arm_release", "arm_asan"], provides=[ - [ArtifactNames.CH_AMD_DEBUG], - [ArtifactNames.CH_AMD_RELEASE], - [ArtifactNames.CH_ARM_RELEASE], - [ArtifactNames.CH_ARM_ASAN], + [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.DEB_AMD_DEBUG], + [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.DEB_AMD_RELEASE], + [ArtifactNames.CH_ARM_RELEASE, ArtifactNames.DEB_ARM_RELEASE], + [ArtifactNames.CH_ARM_ASAN, ArtifactNames.DEB_ARM_ASAN], ], runs_on=[ [RunnerLabels.BUILDER_AMD], @@ -170,6 +175,26 @@ workflow = Workflow.Config( type=Artifact.Type.S3, path=f"{Settings.TEMP_DIR}/build/programs/clickhouse", ), + Artifact.Config( + name=ArtifactNames.DEB_AMD_DEBUG, + type=Artifact.Type.S3, + path=f"{Settings.TEMP_DIR}/output/*.deb", + ), + Artifact.Config( + name=ArtifactNames.DEB_AMD_RELEASE, + type=Artifact.Type.S3, + path=f"{Settings.TEMP_DIR}/output/*.deb", + ), + Artifact.Config( + name=ArtifactNames.DEB_ARM_RELEASE, + type=Artifact.Type.S3, + path=f"{Settings.TEMP_DIR}/output/*.deb", + ), + Artifact.Config( + name=ArtifactNames.DEB_ARM_ASAN, + type=Artifact.Type.S3, + path=f"{Settings.TEMP_DIR}/output/*.deb", + ), ], dockers=DOCKERS, secrets=SECRETS, diff --git a/packages/build b/packages/build index b2dd085d9dd..17ea979622c 100755 --- a/packages/build +++ b/packages/build @@ -5,24 +5,14 @@ set -e # Avoid dependency on locale LC_ALL=C -# Normalize output directory -if [ -n "$OUTPUT_DIR" ]; then - OUTPUT_DIR=$(realpath -m "$OUTPUT_DIR") -fi - -CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) -cd "$CUR_DIR" - -ROOT_DIR=$(readlink -f "$(git rev-parse --show-cdup)") - PKG_ROOT='root' DEB_ARCH=${DEB_ARCH:-amd64} -OUTPUT_DIR=${OUTPUT_DIR:-$ROOT_DIR} -[ -d "${OUTPUT_DIR}" ] || mkdir -p "${OUTPUT_DIR}" SANITIZER=${SANITIZER:-""} SOURCE=${SOURCE:-$PKG_ROOT} +cd "$(dirname "${BASH_SOURCE[0]}")" + HELP="${0} [--test] [--rpm] [-h|--help] --test - adds '+test' prefix to version --apk - build APK packages @@ -40,12 +30,7 @@ Used envs: VERSION_STRING='${VERSION_STRING}' - the package version to overwrite " -if [ -z "${VERSION_STRING}" ]; then - # Get CLICKHOUSE_VERSION_STRING from the current git repo - eval "$("$ROOT_DIR/tests/ci/version_helper.py" -e)" -else - CLICKHOUSE_VERSION_STRING=${VERSION_STRING} -fi +CLICKHOUSE_VERSION_STRING=${VERSION_STRING} export CLICKHOUSE_VERSION_STRING @@ -144,31 +129,32 @@ CLICKHOUSE_VERSION_STRING+=$VERSION_POSTFIX echo -e "\nCurrent 
version is $CLICKHOUSE_VERSION_STRING" for config in clickhouse*.yaml; do + if [[ $BUILD_TYPE != 'release' ]] && [[ "$config" == "clickhouse-keeper-dbg.yaml" ]]; then + continue + fi if [ -n "$MAKE_DEB" ] || [ -n "$MAKE_TGZ" ]; then echo "Building deb package for $config" - - # Preserve package path - exec 9>&1 - PKG_PATH=$(nfpm package --target "$OUTPUT_DIR" --config "$config" --packager deb | tee /dev/fd/9) - PKG_PATH=${PKG_PATH##*created package: } - exec 9>&- + nfpm package --target "$OUTPUT_DIR" --config "$config" --packager deb fi if [ -n "$MAKE_APK" ]; then - echo "Building apk package for $config" - nfpm package --target "$OUTPUT_DIR" --config "$config" --packager apk + echo "Building apk package for $config" + nfpm package --target "$OUTPUT_DIR" --config "$config" --packager apk fi + if [ -n "$MAKE_ARCHLINUX" ]; then - echo "Building archlinux package for $config" - nfpm package --target "$OUTPUT_DIR" --config "$config" --packager archlinux + echo "Building archlinux package for $config" + nfpm package --target "$OUTPUT_DIR" --config "$config" --packager archlinux fi + if [ -n "$MAKE_RPM" ]; then - echo "Building rpm package for $config" - nfpm package --target "$OUTPUT_DIR" --config "$config" --packager rpm + echo "Building rpm package for $config" + nfpm package --target "$OUTPUT_DIR" --config "$config" --packager rpm fi + if [ -n "$MAKE_TGZ" ]; then - echo "Building tarball for $config" - deb2tgz "$PKG_PATH" + echo "Building tarball for $config" + deb2tgz "$PKG_PATH" fi done From 52ae6b48c26e877335a9b65c9d467a7d5013fd95 Mon Sep 17 00:00:00 2001 From: Max Kainov Date: Thu, 14 Nov 2024 23:05:42 +0000 Subject: [PATCH 02/31] CI: packaging --- ci/praktika/runner.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ci/praktika/runner.py b/ci/praktika/runner.py index 6dc8debeed1..e88e00d7428 100644 --- a/ci/praktika/runner.py +++ b/ci/praktika/runner.py @@ -1,5 +1,3 @@ -import glob -import json import os import re import sys From e0c023b8bdd552f05c2e2f4054e624d3c867b0ba Mon Sep 17 00:00:00 2001 From: Max Kainov Date: Thu, 14 Nov 2024 23:05:42 +0000 Subject: [PATCH 03/31] CI: packaging --- ci/jobs/build_clickhouse.py | 18 ++++++++++++++++++ tests/config/install.sh | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/ci/jobs/build_clickhouse.py b/ci/jobs/build_clickhouse.py index 06d8bc1c179..42cecf2e288 100644 --- a/ci/jobs/build_clickhouse.py +++ b/ci/jobs/build_clickhouse.py @@ -135,6 +135,24 @@ def main(): ) res = results[-1].is_ok() + if res and JobStages.UNSHALLOW in stages: + results.append( + Result.create_from_command_execution( + name="Repo Unshallow", + command="git fetch --depth 10000 --filter=tree:0", + with_log=True, + ) + ) + if results[-1].is_ok(): + try: + version = CHVersion.get_version() + print(f"Got version from repo [{version}]") + except Exception as e: + results[-1].set_failed().set_info( + "Failed to retrieve version from repo: ex [{e}]" + ) + res = results[-1].is_ok() + if res and JobStages.BUILD in stages: Shell.check("sccache --show-stats") results.append( diff --git a/tests/config/install.sh b/tests/config/install.sh index ba25f8bc425..54004ef3071 100755 --- a/tests/config/install.sh +++ b/tests/config/install.sh @@ -21,7 +21,7 @@ while [[ "$#" -gt 0 ]]; do --fast-test) FAST_TEST=1 ;; --s3-storage) EXPORT_S3_STORAGE_POLICIES=1 ;; --no-azure) NO_AZURE=1 ;; - *) echo "Unknown option: $1" ; exit 1 ;; + *) echo "Unknown option: $1" ;; esac shift done From d80936977f9a77ab08ec26bf78ec67e105a785a8 Mon Sep 17 00:00:00 2001 From: Max Kainov Date: 
Fri, 15 Nov 2024 12:32:48 +0100 Subject: [PATCH 04/31] CI: Enable Stress Tests with praktika --- .github/workflows/pr.yaml | 52 +++- ci/docker/stateless-test/Dockerfile | 2 + ci/docker/stateless-test/requirements.txt | 1 + ci/jobs/build_clickhouse.py | 40 +-- ci/jobs/functional_stateful_tests.py | 3 +- ci/jobs/functional_stateless_tests.py | 4 + ci/jobs/scripts/clickhouse_proc.py | 18 ++ ci/jobs/scripts/clickhouse_version.py | 4 +- .../functional_tests/setup_ch_cluster.sh | 118 ++++++++ .../functional_tests/setup_log_cluster.sh | 261 ++++++++++++++++++ ci/praktika/artifact.py | 9 + ci/praktika/hook_html.py | 3 + ci/praktika/native_jobs.py | 2 +- ci/praktika/runner.py | 8 +- ci/praktika/s3.py | 3 +- ci/{jobs => settings}/__init__.py | 0 ci/settings/definitions.py | 1 + ci/workflows/pull_request.py | 87 ++++-- tests/ci/report.py | 38 +++ tests/ci/stress_check.py | 15 +- 20 files changed, 601 insertions(+), 68 deletions(-) create mode 100755 ci/jobs/scripts/functional_tests/setup_ch_cluster.sh create mode 100755 ci/jobs/scripts/functional_tests/setup_log_cluster.sh rename ci/{jobs => settings}/__init__.py (100%) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 0e24cc379be..925745b9dc0 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -580,11 +580,11 @@ jobs: python3 -m praktika run --job '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log fi - stateful_tests_amd_debugparallel: + stateful_tests_amd_releaseparallel: runs-on: [builder] needs: [config_workflow, docker_builds, build_amd_debug] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVmdWwgdGVzdHMgKGFtZF9kZWJ1ZyxwYXJhbGxlbCk=') }} - name: "Stateful tests (amd_debug,parallel)" + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVmdWwgdGVzdHMgKGFtZF9yZWxlYXNlLHBhcmFsbGVsKQ==') }} + name: "Stateful tests (amd_release,parallel)" outputs: data: ${{ steps.run.outputs.DATA }} steps: @@ -615,14 +615,54 @@ jobs: . /tmp/praktika_setup_env.sh set -o pipefail if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateful tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + python3 -m praktika run --job '''Stateful tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log else - python3 -m praktika run --job '''Stateful tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + python3 -m praktika run --job '''Stateful tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stress_tests_arm_release: + runs-on: [func-tester-aarch64] + needs: [config_workflow, docker_builds, build_arm_release] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3RzIChhcm1fcmVsZWFzZSk=') }} + name: "Stress tests (arm_release)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ github.head_ref }} + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. 
+ + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stress tests (arm_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stress tests (arm_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log fi finish_workflow: runs-on: [ci_services] - needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, build_arm_release, build_arm_asan, stateless_tests_amd_debugparallel, stateless_tests_amd_debugnon_parallel, stateless_tests_amd_releaseparallel, stateless_tests_amd_releasenon_parallel, stateless_tests_arm_asanparallel, stateless_tests_arm_asannon_parallel, stateful_tests_amd_debugparallel] + needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, build_arm_release, build_arm_asan, stateless_tests_amd_debugparallel, stateless_tests_amd_debugnon_parallel, stateless_tests_amd_releaseparallel, stateless_tests_amd_releasenon_parallel, stateless_tests_arm_asanparallel, stateless_tests_arm_asannon_parallel, stateful_tests_amd_releaseparallel, stress_tests_arm_release] if: ${{ !cancelled() }} name: "Finish Workflow" outputs: diff --git a/ci/docker/stateless-test/Dockerfile b/ci/docker/stateless-test/Dockerfile index dcfaa5f6267..4bf5d2788cc 100644 --- a/ci/docker/stateless-test/Dockerfile +++ b/ci/docker/stateless-test/Dockerfile @@ -58,6 +58,7 @@ RUN apt-get update -y \ curl \ wget \ xz-utils \ + ripgrep \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* @@ -114,4 +115,5 @@ RUN curl -L --no-verbose -O 'https://archive.apache.org/dist/hadoop/common/hadoo RUN npm install -g azurite@3.30.0 \ && npm install -g tslib && npm install -g node +ENV PYTHONPATH=".:./ci" USER clickhouse diff --git a/ci/docker/stateless-test/requirements.txt b/ci/docker/stateless-test/requirements.txt index 6f64cc08951..64b06640c31 100644 --- a/ci/docker/stateless-test/requirements.txt +++ b/ci/docker/stateless-test/requirements.txt @@ -4,3 +4,4 @@ requests==2.32.3 pandas==1.5.3 scipy==1.12.0 pyarrow==18.0.0 +grpcio==1.47.0 diff --git a/ci/jobs/build_clickhouse.py b/ci/jobs/build_clickhouse.py index 42cecf2e288..5ab721fb81e 100644 --- a/ci/jobs/build_clickhouse.py +++ b/ci/jobs/build_clickhouse.py @@ -37,8 +37,7 @@ CMAKE_CMD = """cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA \ -DCMAKE_INSTALL_SYSCONFDIR=/etc -DCMAKE_INSTALL_LOCALSTATEDIR=/var -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON \ {AUX_DEFS} \ -DCMAKE_C_COMPILER=clang-18 -DCMAKE_CXX_COMPILER=clang++-18 \ --DCOMPILER_CACHE={CACHE_TYPE} \ --DENABLE_BUILD_PROFILING=1 {DIR}""" +-DCOMPILER_CACHE={CACHE_TYPE} -DENABLE_BUILD_PROFILING=1 {DIR}""" def main(): @@ -95,24 +94,27 @@ def main(): res = True results = [] + version = "" if res and JobStages.UNSHALLOW in stages: results.append( Result.create_from_command_execution( name="Repo Unshallow", - command="git rev-parse --is-shallow-repository | grep -q true && git fetch --filter=tree:0 --depth=5000 origin $(git rev-parse --abbrev-ref HEAD)", + command="git rev-parse 
--is-shallow-repository | grep -q true && git fetch --depth 10000 --no-tags --filter=tree:0 origin $(git rev-parse --abbrev-ref HEAD)", with_log=True, ) ) - if results[-1].is_ok(): + res = results[-1].is_ok() + if res: try: version = CHVersion.get_version() + assert version print(f"Got version from repo [{version}]") except Exception as e: results[-1].set_failed().set_info( - f"Failed to retrieve version from repo: ex [{e}]" + f"Failed to get version from repo, ex [{e}]" ) - res = results[-1].is_ok() + res = False if res and JobStages.CHECKOUT_SUBMODULES in stages: Shell.check(f"rm -rf {build_dir} && mkdir -p {build_dir}") @@ -135,24 +137,6 @@ def main(): ) res = results[-1].is_ok() - if res and JobStages.UNSHALLOW in stages: - results.append( - Result.create_from_command_execution( - name="Repo Unshallow", - command="git fetch --depth 10000 --filter=tree:0", - with_log=True, - ) - ) - if results[-1].is_ok(): - try: - version = CHVersion.get_version() - print(f"Got version from repo [{version}]") - except Exception as e: - results[-1].set_failed().set_info( - "Failed to retrieve version from repo: ex [{e}]" - ) - res = results[-1].is_ok() - if res and JobStages.BUILD in stages: Shell.check("sccache --show-stats") results.append( @@ -177,6 +161,11 @@ def main(): else: assert False, "TODO" + if "amd" in build_type: + deb_arch = "amd64" + else: + deb_arch = "arm64" + output_dir = "/tmp/praktika/output/" assert Shell.check(f"rm -f {output_dir}/*.deb") @@ -185,7 +174,8 @@ def main(): name="Build Packages", command=[ f"DESTDIR={build_dir}/root ninja programs/install", - f"ln -sf {build_dir}/root {Utils.cwd()}/packages/root && cd {Utils.cwd()}/packages/ && OUTPUT_DIR={output_dir} BUILD_TYPE={package_type} VERSION_STRING={version} ./build --deb", + f"ln -sf {build_dir}/root {Utils.cwd()}/packages/root", + f"cd {Utils.cwd()}/packages/ && OUTPUT_DIR={output_dir} BUILD_TYPE={package_type} VERSION_STRING={version} DEB_ARCH={deb_arch} ./build --deb", ], workdir=build_dir, with_log=True, diff --git a/ci/jobs/functional_stateful_tests.py b/ci/jobs/functional_stateful_tests.py index b5840fcd45d..f78e158037f 100644 --- a/ci/jobs/functional_stateful_tests.py +++ b/ci/jobs/functional_stateful_tests.py @@ -1,5 +1,4 @@ import argparse -import os import time from pathlib import Path @@ -131,6 +130,8 @@ def main(): ) res = res and CH.start() res = res and CH.wait_ready() + # TODO: Use --database-replicated optionally + res = res and Shell.check(f"./ci/jobs/scripts/functional_tests/setup_ch_cluster.sh") if res: print("ch started") logs_to_attach += [ diff --git a/ci/jobs/functional_stateless_tests.py b/ci/jobs/functional_stateless_tests.py index 0d73312bd9e..676a05fbac1 100644 --- a/ci/jobs/functional_stateless_tests.py +++ b/ci/jobs/functional_stateless_tests.py @@ -101,6 +101,7 @@ def main(): f"ln -sf {ch_path}/clickhouse {ch_path}/clickhouse-client", f"ln -sf {ch_path}/clickhouse {ch_path}/clickhouse-compressor", f"ln -sf {ch_path}/clickhouse {ch_path}/clickhouse-local", + f"ln -sf {ch_path}/clickhouse {ch_path}/clickhouse-disks", f"rm -rf {Settings.TEMP_DIR}/etc/ && mkdir -p {Settings.TEMP_DIR}/etc/clickhouse-client {Settings.TEMP_DIR}/etc/clickhouse-server", f"cp programs/server/config.xml programs/server/users.xml {Settings.TEMP_DIR}/etc/clickhouse-server/", # TODO: find a way to work with Azure secret so it's ok for local tests as well, for now keep azure disabled @@ -114,6 +115,7 @@ def main(): f"for file in /tmp/praktika/etc/clickhouse-server/*.xml; do [ -f $file ] && echo Change config $file && sed -i 
's|>/var/log|>{Settings.TEMP_DIR}/var/log|g; s|>/etc/|>{Settings.TEMP_DIR}/etc/|g' $(readlink -f $file); done", f"for file in /tmp/praktika/etc/clickhouse-server/config.d/*.xml; do [ -f $file ] && echo Change config $file && sed -i 's|local_disk|{Settings.TEMP_DIR}/local_disk|g' $(readlink -f $file); done", f"clickhouse-server --version", + f"chmod +x /tmp/praktika/input/clickhouse-odbc-bridge", ] results.append( Result.create_from_command_execution( @@ -138,6 +140,7 @@ def main(): res = res and Shell.check( "aws s3 ls s3://test --endpoint-url http://localhost:11111/", verbose=True ) + res = res and CH.log_cluster_config() res = res and CH.start() res = res and CH.wait_ready() if res: @@ -170,6 +173,7 @@ def main(): batch_total=total_batches, test=args.test, ) + CH.log_cluster_stop_replication() results.append(FTResultsProcessor(wd=Settings.OUTPUT_DIR).run()) results[-1].set_timing(stopwatch=stop_watch_) res = results[-1].is_ok() diff --git a/ci/jobs/scripts/clickhouse_proc.py b/ci/jobs/scripts/clickhouse_proc.py index 6108563605f..46efc41ee27 100644 --- a/ci/jobs/scripts/clickhouse_proc.py +++ b/ci/jobs/scripts/clickhouse_proc.py @@ -66,6 +66,24 @@ class ClickHouseProc: print(f"Started setup_minio.sh asynchronously with PID {process.pid}") return True + def log_cluster_config(self): + return Shell.check( + f"./ci/jobs/scripts/functional_tests/setup_log_cluster.sh --config-logs-export-cluster /tmp/praktika/etc/clickhouse-server/config.d/system_logs_export.yaml", + verbose=True, + ) + + def log_cluster_setup_replication(self): + return Shell.check( + f"./ci/jobs/scripts/functional_tests/setup_log_cluster.sh --setup-logs-replication", + verbose=True, + ) + + def log_cluster_stop_replication(self): + return Shell.check( + f"./ci/jobs/scripts/functional_tests/setup_log_cluster.sh --stop-log-replication", + verbose=True, + ) + def start(self): print("Starting ClickHouse server") Shell.check(f"rm {self.pid_file}") diff --git a/ci/jobs/scripts/clickhouse_version.py b/ci/jobs/scripts/clickhouse_version.py index 0f60a89e92f..44c2753fb11 100644 --- a/ci/jobs/scripts/clickhouse_version.py +++ b/ci/jobs/scripts/clickhouse_version.py @@ -32,5 +32,7 @@ class CHVersion: versions[name] = value version_sha = versions["githash"] - tweak = int(Shell.get_output(f"git rev-list --count {version_sha}..HEAD", verbose=True)) + tweak = int( + Shell.get_output(f"git rev-list --count {version_sha}..HEAD", verbose=True) + ) return f"{versions['major']}.{versions['minor']}.{versions['patch']}.{tweak}" diff --git a/ci/jobs/scripts/functional_tests/setup_ch_cluster.sh b/ci/jobs/scripts/functional_tests/setup_ch_cluster.sh new file mode 100755 index 00000000000..0c2d6b11b05 --- /dev/null +++ b/ci/jobs/scripts/functional_tests/setup_ch_cluster.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +set -e -x + +clickhouse-client --query "SHOW DATABASES" +clickhouse-client --query "CREATE DATABASE datasets" +clickhouse-client < ./tests/docker_scripts/create.sql +clickhouse-client --query "SHOW TABLES FROM datasets" + +USE_DATABASE_REPLICATED=0 + +while [[ "$#" -gt 0 ]]; do + case $1 in + --database-replicated) + echo "Setup cluster for testing with Database Replicated" + USE_DATABASE_REPLICATED=1 + ;; + *) + echo "Unknown option: $1" + exit 1 + ;; + esac + shift +done + +if [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then + clickhouse-client --query "CREATE DATABASE test ON CLUSTER 'test_cluster_database_replicated' + ENGINE=Replicated('/test/clickhouse/db/test', '{shard}', '{replica}')" + + clickhouse-client --query "CREATE TABLE test.hits AS 
datasets.hits_v1" + clickhouse-client --query "CREATE TABLE test.visits AS datasets.visits_v1" + + clickhouse-client --max_memory_usage 10G --query "INSERT INTO test.hits SELECT * FROM datasets.hits_v1" + clickhouse-client --max_memory_usage 10G --query "INSERT INTO test.visits SELECT * FROM datasets.visits_v1" + + clickhouse-client --query "DROP TABLE datasets.hits_v1" + clickhouse-client --query "DROP TABLE datasets.visits_v1" +else + clickhouse-client --query "CREATE DATABASE test" + clickhouse-client --query "SHOW TABLES FROM test" + if [[ -n "$USE_S3_STORAGE_FOR_MERGE_TREE" ]] && [[ "$USE_S3_STORAGE_FOR_MERGE_TREE" -eq 1 ]]; then + clickhouse-client --query "CREATE TABLE test.hits (WatchID UInt64, JavaEnable UInt8, Title String, GoodEvent Int16, + EventTime DateTime, EventDate Date, CounterID UInt32, ClientIP UInt32, ClientIP6 FixedString(16), RegionID UInt32, + UserID UInt64, CounterClass Int8, OS UInt8, UserAgent UInt8, URL String, Referer String, URLDomain String, + RefererDomain String, Refresh UInt8, IsRobot UInt8, RefererCategories Array(UInt16), URLCategories Array(UInt16), + URLRegions Array(UInt32), RefererRegions Array(UInt32), ResolutionWidth UInt16, ResolutionHeight UInt16, ResolutionDepth UInt8, + FlashMajor UInt8, FlashMinor UInt8, FlashMinor2 String, NetMajor UInt8, NetMinor UInt8, UserAgentMajor UInt16, + UserAgentMinor FixedString(2), CookieEnable UInt8, JavascriptEnable UInt8, IsMobile UInt8, MobilePhone UInt8, + MobilePhoneModel String, Params String, IPNetworkID UInt32, TraficSourceID Int8, SearchEngineID UInt16, + SearchPhrase String, AdvEngineID UInt8, IsArtifical UInt8, WindowClientWidth UInt16, WindowClientHeight UInt16, + ClientTimeZone Int16, ClientEventTime DateTime, SilverlightVersion1 UInt8, SilverlightVersion2 UInt8, SilverlightVersion3 UInt32, + SilverlightVersion4 UInt16, PageCharset String, CodeVersion UInt32, IsLink UInt8, IsDownload UInt8, IsNotBounce UInt8, + FUniqID UInt64, HID UInt32, IsOldCounter UInt8, IsEvent UInt8, IsParameter UInt8, DontCountHits UInt8, WithHash UInt8, + HitColor FixedString(1), UTCEventTime DateTime, Age UInt8, Sex UInt8, Income UInt8, Interests UInt16, Robotness UInt8, + GeneralInterests Array(UInt16), RemoteIP UInt32, RemoteIP6 FixedString(16), WindowName Int32, OpenerName Int32, + HistoryLength Int16, BrowserLanguage FixedString(2), BrowserCountry FixedString(2), SocialNetwork String, SocialAction String, + HTTPError UInt16, SendTiming Int32, DNSTiming Int32, ConnectTiming Int32, ResponseStartTiming Int32, ResponseEndTiming Int32, + FetchTiming Int32, RedirectTiming Int32, DOMInteractiveTiming Int32, DOMContentLoadedTiming Int32, DOMCompleteTiming Int32, + LoadEventStartTiming Int32, LoadEventEndTiming Int32, NSToDOMContentLoadedTiming Int32, FirstPaintTiming Int32, + RedirectCount Int8, SocialSourceNetworkID UInt8, SocialSourcePage String, ParamPrice Int64, ParamOrderID String, + ParamCurrency FixedString(3), ParamCurrencyID UInt16, GoalsReached Array(UInt32), OpenstatServiceName String, + OpenstatCampaignID String, OpenstatAdID String, OpenstatSourceID String, UTMSource String, UTMMedium String, + UTMCampaign String, UTMContent String, UTMTerm String, FromTag String, HasGCLID UInt8, RefererHash UInt64, + URLHash UInt64, CLID UInt32, YCLID UInt64, ShareService String, ShareURL String, ShareTitle String, + ParsedParams Nested(Key1 String, Key2 String, Key3 String, Key4 String, Key5 String, ValueDouble Float64), + IslandID FixedString(16), RequestNum UInt32, RequestTry UInt8) ENGINE = MergeTree() PARTITION BY 
toYYYYMM(EventDate) + ORDER BY (CounterID, EventDate, intHash32(UserID)) SAMPLE BY intHash32(UserID) SETTINGS index_granularity = 8192, storage_policy='s3_cache'" + clickhouse-client --query "CREATE TABLE test.visits (CounterID UInt32, StartDate Date, Sign Int8, IsNew UInt8, + VisitID UInt64, UserID UInt64, StartTime DateTime, Duration UInt32, UTCStartTime DateTime, PageViews Int32, + Hits Int32, IsBounce UInt8, Referer String, StartURL String, RefererDomain String, StartURLDomain String, + EndURL String, LinkURL String, IsDownload UInt8, TraficSourceID Int8, SearchEngineID UInt16, SearchPhrase String, + AdvEngineID UInt8, PlaceID Int32, RefererCategories Array(UInt16), URLCategories Array(UInt16), URLRegions Array(UInt32), + RefererRegions Array(UInt32), IsYandex UInt8, GoalReachesDepth Int32, GoalReachesURL Int32, GoalReachesAny Int32, + SocialSourceNetworkID UInt8, SocialSourcePage String, MobilePhoneModel String, ClientEventTime DateTime, RegionID UInt32, + ClientIP UInt32, ClientIP6 FixedString(16), RemoteIP UInt32, RemoteIP6 FixedString(16), IPNetworkID UInt32, + SilverlightVersion3 UInt32, CodeVersion UInt32, ResolutionWidth UInt16, ResolutionHeight UInt16, UserAgentMajor UInt16, + UserAgentMinor UInt16, WindowClientWidth UInt16, WindowClientHeight UInt16, SilverlightVersion2 UInt8, SilverlightVersion4 UInt16, + FlashVersion3 UInt16, FlashVersion4 UInt16, ClientTimeZone Int16, OS UInt8, UserAgent UInt8, ResolutionDepth UInt8, + FlashMajor UInt8, FlashMinor UInt8, NetMajor UInt8, NetMinor UInt8, MobilePhone UInt8, SilverlightVersion1 UInt8, + Age UInt8, Sex UInt8, Income UInt8, JavaEnable UInt8, CookieEnable UInt8, JavascriptEnable UInt8, IsMobile UInt8, + BrowserLanguage UInt16, BrowserCountry UInt16, Interests UInt16, Robotness UInt8, GeneralInterests Array(UInt16), + Params Array(String), Goals Nested(ID UInt32, Serial UInt32, EventTime DateTime, Price Int64, OrderID String, CurrencyID UInt32), + WatchIDs Array(UInt64), ParamSumPrice Int64, ParamCurrency FixedString(3), ParamCurrencyID UInt16, ClickLogID UInt64, + ClickEventID Int32, ClickGoodEvent Int32, ClickEventTime DateTime, ClickPriorityID Int32, ClickPhraseID Int32, ClickPageID Int32, + ClickPlaceID Int32, ClickTypeID Int32, ClickResourceID Int32, ClickCost UInt32, ClickClientIP UInt32, ClickDomainID UInt32, + ClickURL String, ClickAttempt UInt8, ClickOrderID UInt32, ClickBannerID UInt32, ClickMarketCategoryID UInt32, ClickMarketPP UInt32, + ClickMarketCategoryName String, ClickMarketPPName String, ClickAWAPSCampaignName String, ClickPageName String, ClickTargetType UInt16, + ClickTargetPhraseID UInt64, ClickContextType UInt8, ClickSelectType Int8, ClickOptions String, ClickGroupBannerID Int32, + OpenstatServiceName String, OpenstatCampaignID String, OpenstatAdID String, OpenstatSourceID String, UTMSource String, + UTMMedium String, UTMCampaign String, UTMContent String, UTMTerm String, FromTag String, HasGCLID UInt8, FirstVisit DateTime, + PredLastVisit Date, LastVisit Date, TotalVisits UInt32, TraficSource Nested(ID Int8, SearchEngineID UInt16, AdvEngineID UInt8, + PlaceID UInt16, SocialSourceNetworkID UInt8, Domain String, SearchPhrase String, SocialSourcePage String), Attendance FixedString(16), + CLID UInt32, YCLID UInt64, NormalizedRefererHash UInt64, SearchPhraseHash UInt64, RefererDomainHash UInt64, NormalizedStartURLHash UInt64, + StartURLDomainHash UInt64, NormalizedEndURLHash UInt64, TopLevelDomain UInt64, URLScheme UInt64, OpenstatServiceNameHash UInt64, + OpenstatCampaignIDHash UInt64, OpenstatAdIDHash UInt64, 
OpenstatSourceIDHash UInt64, UTMSourceHash UInt64, UTMMediumHash UInt64, + UTMCampaignHash UInt64, UTMContentHash UInt64, UTMTermHash UInt64, FromHash UInt64, WebVisorEnabled UInt8, WebVisorActivity UInt32, + ParsedParams Nested(Key1 String, Key2 String, Key3 String, Key4 String, Key5 String, ValueDouble Float64), + Market Nested(Type UInt8, GoalID UInt32, OrderID String, OrderPrice Int64, PP UInt32, DirectPlaceID UInt32, DirectOrderID UInt32, + DirectBannerID UInt32, GoodID String, GoodName String, GoodQuantity Int32, GoodPrice Int64), IslandID FixedString(16)) + ENGINE = CollapsingMergeTree(Sign) PARTITION BY toYYYYMM(StartDate) ORDER BY (CounterID, StartDate, intHash32(UserID), VisitID) + SAMPLE BY intHash32(UserID) SETTINGS index_granularity = 8192, storage_policy='s3_cache'" + + clickhouse-client --max_memory_usage 10G --query "INSERT INTO test.hits SELECT * FROM datasets.hits_v1 SETTINGS enable_filesystem_cache_on_write_operations=0, max_insert_threads=16" + clickhouse-client --max_memory_usage 10G --query "INSERT INTO test.visits SELECT * FROM datasets.visits_v1 SETTINGS enable_filesystem_cache_on_write_operations=0, max_insert_threads=16" + clickhouse-client --query "DROP TABLE datasets.visits_v1 SYNC" + clickhouse-client --query "DROP TABLE datasets.hits_v1 SYNC" + else + clickhouse-client --query "RENAME TABLE datasets.hits_v1 TO test.hits" + clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits" + fi + clickhouse-client --query "CREATE TABLE test.hits_s3 (WatchID UInt64, JavaEnable UInt8, Title String, GoodEvent Int16, EventTime DateTime, EventDate Date, CounterID UInt32, ClientIP UInt32, ClientIP6 FixedString(16), RegionID UInt32, UserID UInt64, CounterClass Int8, OS UInt8, UserAgent UInt8, URL String, Referer String, URLDomain String, RefererDomain String, Refresh UInt8, IsRobot UInt8, RefererCategories Array(UInt16), URLCategories Array(UInt16), URLRegions Array(UInt32), RefererRegions Array(UInt32), ResolutionWidth UInt16, ResolutionHeight UInt16, ResolutionDepth UInt8, FlashMajor UInt8, FlashMinor UInt8, FlashMinor2 String, NetMajor UInt8, NetMinor UInt8, UserAgentMajor UInt16, UserAgentMinor FixedString(2), CookieEnable UInt8, JavascriptEnable UInt8, IsMobile UInt8, MobilePhone UInt8, MobilePhoneModel String, Params String, IPNetworkID UInt32, TraficSourceID Int8, SearchEngineID UInt16, SearchPhrase String, AdvEngineID UInt8, IsArtifical UInt8, WindowClientWidth UInt16, WindowClientHeight UInt16, ClientTimeZone Int16, ClientEventTime DateTime, SilverlightVersion1 UInt8, SilverlightVersion2 UInt8, SilverlightVersion3 UInt32, SilverlightVersion4 UInt16, PageCharset String, CodeVersion UInt32, IsLink UInt8, IsDownload UInt8, IsNotBounce UInt8, FUniqID UInt64, HID UInt32, IsOldCounter UInt8, IsEvent UInt8, IsParameter UInt8, DontCountHits UInt8, WithHash UInt8, HitColor FixedString(1), UTCEventTime DateTime, Age UInt8, Sex UInt8, Income UInt8, Interests UInt16, Robotness UInt8, GeneralInterests Array(UInt16), RemoteIP UInt32, RemoteIP6 FixedString(16), WindowName Int32, OpenerName Int32, HistoryLength Int16, BrowserLanguage FixedString(2), BrowserCountry FixedString(2), SocialNetwork String, SocialAction String, HTTPError UInt16, SendTiming Int32, DNSTiming Int32, ConnectTiming Int32, ResponseStartTiming Int32, ResponseEndTiming Int32, FetchTiming Int32, RedirectTiming Int32, DOMInteractiveTiming Int32, DOMContentLoadedTiming Int32, DOMCompleteTiming Int32, LoadEventStartTiming Int32, LoadEventEndTiming Int32, NSToDOMContentLoadedTiming Int32, 
FirstPaintTiming Int32, RedirectCount Int8, SocialSourceNetworkID UInt8, SocialSourcePage String, ParamPrice Int64, ParamOrderID String, ParamCurrency FixedString(3), ParamCurrencyID UInt16, GoalsReached Array(UInt32), OpenstatServiceName String, OpenstatCampaignID String, OpenstatAdID String, OpenstatSourceID String, UTMSource String, UTMMedium String, UTMCampaign String, UTMContent String, UTMTerm String, FromTag String, HasGCLID UInt8, RefererHash UInt64, URLHash UInt64, CLID UInt32, YCLID UInt64, ShareService String, ShareURL String, ShareTitle String, ParsedParams Nested(Key1 String, Key2 String, Key3 String, Key4 String, Key5 String, ValueDouble Float64), IslandID FixedString(16), RequestNum UInt32, RequestTry UInt8) ENGINE = MergeTree() PARTITION BY toYYYYMM(EventDate) ORDER BY (CounterID, EventDate, intHash32(UserID)) SAMPLE BY intHash32(UserID) SETTINGS index_granularity = 8192, storage_policy='s3_cache'" + # AWS S3 is very inefficient, so increase memory even further: + clickhouse-client --max_memory_usage 30G --max_memory_usage_for_user 30G --query "INSERT INTO test.hits_s3 SELECT * FROM test.hits SETTINGS enable_filesystem_cache_on_write_operations=0, max_insert_threads=16" +fi + +clickhouse-client --query "SHOW TABLES FROM test" +clickhouse-client --query "SELECT count() FROM test.hits" +clickhouse-client --query "SELECT count() FROM test.visits" diff --git a/ci/jobs/scripts/functional_tests/setup_log_cluster.sh b/ci/jobs/scripts/functional_tests/setup_log_cluster.sh new file mode 100755 index 00000000000..fc6946b7025 --- /dev/null +++ b/ci/jobs/scripts/functional_tests/setup_log_cluster.sh @@ -0,0 +1,261 @@ +#!/bin/bash + +set -e +# This script sets up export of system log tables to a remote server. +# Remote tables are created if not exist, and augmented with extra columns, +# and their names will contain a hash of the table structure, +# which allows exporting tables from servers of different versions. 
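To make the naming scheme described in the header comment concrete: the remote destination table is the local table name plus a suffix derived from a hash of its column structure, so servers with different schemas export into different tables. A minimal illustration of the idea, assuming a local `clickhouse-client` and using `query_log` purely as an example (the actual hash query and DDL rewriting appear further down in this script):

```bash
# Sketch only: compute a structure-dependent suffix for one system table.
table=query_log
hash=$(clickhouse-client --query "
    SELECT sipHash64(9, groupArray((name, type)))
    FROM (SELECT name, type FROM system.columns
          WHERE database = 'system' AND table = '$table'
          ORDER BY position)
")
# The remote destination would then be created as ${table}_${hash},
# e.g. query_log_<hash> on the CI logs cluster.
echo "${table}_${hash}"
```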
+
+# Config file contains KEY=VALUE pairs with any necessary parameters like:
+# CLICKHOUSE_CI_LOGS_HOST - remote host
+# CLICKHOUSE_CI_LOGS_USER - user name
+# CLICKHOUSE_CI_LOGS_PASSWORD - password for user
+CLICKHOUSE_CI_LOGS_CREDENTIALS=${CLICKHOUSE_CI_LOGS_CREDENTIALS:-/tmp/export-logs-config.sh}
+CLICKHOUSE_CI_LOGS_USER=${CLICKHOUSE_CI_LOGS_USER:-ci}
+
+# Pre-configured destination cluster, where to export the data
+CLICKHOUSE_CI_LOGS_CLUSTER=${CLICKHOUSE_CI_LOGS_CLUSTER:-system_logs_export}
+
+EXTRA_COLUMNS=${EXTRA_COLUMNS:-"pull_request_number UInt32, commit_sha String, check_start_time DateTime('UTC'), check_name LowCardinality(String), instance_type LowCardinality(String), instance_id String, INDEX ix_pr (pull_request_number) TYPE set(100), INDEX ix_commit (commit_sha) TYPE set(100), INDEX ix_check_time (check_start_time) TYPE minmax, "}
+EXTRA_COLUMNS_EXPRESSION=${EXTRA_COLUMNS_EXPRESSION:-"CAST(0 AS UInt32) AS pull_request_number, '' AS commit_sha, now() AS check_start_time, toLowCardinality('') AS check_name, toLowCardinality('') AS instance_type, '' AS instance_id"}
+EXTRA_ORDER_BY_COLUMNS=${EXTRA_ORDER_BY_COLUMNS:-"check_name"}
+
+# trace_log needs more columns for symbolization
+EXTRA_COLUMNS_TRACE_LOG="${EXTRA_COLUMNS} symbols Array(LowCardinality(String)), lines Array(LowCardinality(String)), "
+EXTRA_COLUMNS_EXPRESSION_TRACE_LOG="${EXTRA_COLUMNS_EXPRESSION}, arrayMap(x -> demangle(addressToSymbol(x)), trace)::Array(LowCardinality(String)) AS symbols, arrayMap(x -> addressToLine(x), trace)::Array(LowCardinality(String)) AS lines"
+
+# coverage_log needs more columns for symbolization, but only symbol names (the line numbers are too heavy to calculate)
+EXTRA_COLUMNS_COVERAGE_LOG="${EXTRA_COLUMNS} symbols Array(LowCardinality(String)), "
+EXTRA_COLUMNS_EXPRESSION_COVERAGE_LOG="${EXTRA_COLUMNS_EXPRESSION}, arrayDistinct(arrayMap(x -> demangle(addressToSymbol(x)), coverage))::Array(LowCardinality(String)) AS symbols"
+
+
+function __set_connection_args
+{
+    # It's impossible to use a generic $CONNECTION_ARGS string: it's unsafe from a word-splitting perspective.
+    # That's why we must stick to the generated options array.
+    CONNECTION_ARGS=(
+        --receive_timeout=45 --send_timeout=45 --secure
+        --user "${CLICKHOUSE_CI_LOGS_USER}" --host "${CLICKHOUSE_CI_LOGS_HOST}"
+        --password "${CLICKHOUSE_CI_LOGS_PASSWORD}"
+    )
+}
+
+function __shadow_credentials
+{
+    # The function completely mangles the output; it shouldn't be used in normal functions, only in ()
+    # The only way to substitute the env as plain text is using perl 's/\Qsomething\E/another/'
+    exec &> >(perl -pe '
+        s(\Q$ENV{CLICKHOUSE_CI_LOGS_HOST}\E)[CLICKHOUSE_CI_LOGS_HOST]g;
+        s(\Q$ENV{CLICKHOUSE_CI_LOGS_USER}\E)[CLICKHOUSE_CI_LOGS_USER]g;
+        s(\Q$ENV{CLICKHOUSE_CI_LOGS_PASSWORD}\E)[CLICKHOUSE_CI_LOGS_PASSWORD]g;
+    ')
+}
+
+function check_logs_credentials
+(
+    # The function connects with the given credentials, and if it's unable to execute the simplest query, returns a non-zero exit code
+
+    # First, check if all necessary parameters are set
+    set +x
+    for parameter in CLICKHOUSE_CI_LOGS_HOST CLICKHOUSE_CI_LOGS_USER CLICKHOUSE_CI_LOGS_PASSWORD; do
+        export -p | grep -q "$parameter" || {
+            echo "Credentials parameter $parameter is unset"
+            return 1
+        }
+    done
+
+    __shadow_credentials
+    __set_connection_args
+    local code
+    # Catch both success and error to not fail on `set -e`
+    clickhouse-client "${CONNECTION_ARGS[@]}" -q 'SELECT 1 FORMAT Null' && return 0 || code=$?
+    if [ "$code" != 0 ]; then
+        echo 'Failed to connect to CI Logs cluster'
+        return $code
+    fi
+)
+
+function config_logs_export_cluster
+(
+    # The function is launched in a separate shell instance to not expose the
+    # exported values from CLICKHOUSE_CI_LOGS_CREDENTIALS
+    set +x
+    if ! [ -r "${CLICKHOUSE_CI_LOGS_CREDENTIALS}" ]; then
+        echo "File $CLICKHOUSE_CI_LOGS_CREDENTIALS does not exist, skipping setup"
+        return
+    fi
+    set -a
+    # shellcheck disable=SC1090
+    source "${CLICKHOUSE_CI_LOGS_CREDENTIALS}"
+    set +a
+    __shadow_credentials
+    echo "Checking if the credentials work"
+    check_logs_credentials || return 0
+    cluster_config="${1:-/etc/clickhouse-server/config.d/system_logs_export.yaml}"
+    mkdir -p "$(dirname "$cluster_config")"
+    echo "remote_servers:
+  ${CLICKHOUSE_CI_LOGS_CLUSTER}:
+    shard:
+      replica:
+        secure: 1
+        user: '${CLICKHOUSE_CI_LOGS_USER}'
+        host: '${CLICKHOUSE_CI_LOGS_HOST}'
+        port: 9440
+        password: '${CLICKHOUSE_CI_LOGS_PASSWORD}'
+" > "$cluster_config"
+    echo "Cluster ${CLICKHOUSE_CI_LOGS_CLUSTER} is configured in ${cluster_config}"
+)
+
+function setup_logs_replication
+(
+    # The function is launched in a separate shell instance to not expose the
+    # exported values from CLICKHOUSE_CI_LOGS_CREDENTIALS
+    set +x
+    # disable output
+    if ! [ -r "${CLICKHOUSE_CI_LOGS_CREDENTIALS}" ]; then
+        echo "File $CLICKHOUSE_CI_LOGS_CREDENTIALS does not exist, skipping setup"
+        return 0
+    fi
+    set -a
+    # shellcheck disable=SC1090
+    source "${CLICKHOUSE_CI_LOGS_CREDENTIALS}"
+    set +a
+    __shadow_credentials
+    echo "Checking if the credentials work"
+    check_logs_credentials || return 0
+    __set_connection_args
+
+    echo "My hostname is ${HOSTNAME}"
+
+    echo 'Create all configured system logs'
+    clickhouse-client --query "SYSTEM FLUSH LOGS"
+
+    debug_or_sanitizer_build=$(clickhouse-client -q "WITH ((SELECT value FROM system.build_options WHERE name='BUILD_TYPE') AS build, (SELECT value FROM system.build_options WHERE name='CXX_FLAGS') as flags) SELECT build='Debug' OR flags LIKE '%fsanitize%'")
+    echo "Build is debug or sanitizer: $debug_or_sanitizer_build"
+
+    # We will pre-create a table system.coverage_log.
+ # It is normally created by clickhouse-test rather than the server, + # so we will create it in advance to make it be picked up by the next commands: + + clickhouse-client --query " + CREATE TABLE IF NOT EXISTS system.coverage_log + ( + time DateTime COMMENT 'The time of test run', + test_name String COMMENT 'The name of the test', + coverage Array(UInt64) COMMENT 'An array of addresses of the code (a subset of addresses instrumented for coverage) that were encountered during the test run' + ) ENGINE = MergeTree ORDER BY test_name COMMENT 'Contains information about per-test coverage from the CI, but used only for exporting to the CI cluster' + " + + # For each system log table: + echo 'Create %_log tables' + clickhouse-client --query "SHOW TABLES FROM system LIKE '%\\_log'" | while read -r table + do + if [[ "$table" = "trace_log" ]] + then + EXTRA_COLUMNS_FOR_TABLE="${EXTRA_COLUMNS_TRACE_LOG}" + # Do not try to resolve stack traces in case of debug/sanitizers + # build, since it is too slow (flushing of trace_log can take ~1min + # with such MV attached) + if [[ "$debug_or_sanitizer_build" = 1 ]] + then + EXTRA_COLUMNS_EXPRESSION_FOR_TABLE="${EXTRA_COLUMNS_EXPRESSION}" + else + EXTRA_COLUMNS_EXPRESSION_FOR_TABLE="${EXTRA_COLUMNS_EXPRESSION_TRACE_LOG}" + fi + elif [[ "$table" = "coverage_log" ]] + then + EXTRA_COLUMNS_FOR_TABLE="${EXTRA_COLUMNS_COVERAGE_LOG}" + EXTRA_COLUMNS_EXPRESSION_FOR_TABLE="${EXTRA_COLUMNS_EXPRESSION_COVERAGE_LOG}" + else + EXTRA_COLUMNS_FOR_TABLE="${EXTRA_COLUMNS}" + EXTRA_COLUMNS_EXPRESSION_FOR_TABLE="${EXTRA_COLUMNS_EXPRESSION}" + fi + + # Calculate hash of its structure. Note: 4 is the version of extra columns - increment it if extra columns are changed: + hash=$(clickhouse-client --query " + SELECT sipHash64(9, groupArray((name, type))) + FROM (SELECT name, type FROM system.columns + WHERE database = 'system' AND table = '$table' + ORDER BY position) + ") + + # Create the destination table with adapted name and structure: + statement=$(clickhouse-client --format TSVRaw --query "SHOW CREATE TABLE system.${table}" | sed -r -e ' + s/^\($/('"$EXTRA_COLUMNS_FOR_TABLE"'/; + s/^ORDER BY (([^\(].+?)|\((.+?)\))$/ORDER BY ('"$EXTRA_ORDER_BY_COLUMNS"', \2\3)/; + s/^CREATE TABLE system\.\w+_log$/CREATE TABLE IF NOT EXISTS '"$table"'_'"$hash"'/; + /^TTL /d + ') + + echo -e "Creating remote destination table ${table}_${hash} with statement:" >&2 + + echo "::group::${table}" + # there's the only way big "$statement" can be printed without causing EAGAIN error + # cat: write error: Resource temporarily unavailable + statement_print="${statement}" + if [ "${#statement_print}" -gt 4000 ]; then + statement_print="${statement::1999}\n…\n${statement:${#statement}-1999}" + fi + echo -e "$statement_print" + echo "::endgroup::" + + echo "$statement" | clickhouse-client --database_replicated_initial_query_timeout_sec=10 \ + --distributed_ddl_task_timeout=30 --distributed_ddl_output_mode=throw_only_active \ + "${CONNECTION_ARGS[@]}" || continue + + echo "Creating table system.${table}_sender" >&2 + + # Create Distributed table and materialized view to watch on the original table: + clickhouse-client --query " + CREATE TABLE system.${table}_sender + ENGINE = Distributed(${CLICKHOUSE_CI_LOGS_CLUSTER}, default, ${table}_${hash}) + SETTINGS flush_on_detach=0 + EMPTY AS + SELECT ${EXTRA_COLUMNS_EXPRESSION_FOR_TABLE}, * + FROM system.${table} + " || continue + + echo "Creating materialized view system.${table}_watcher" >&2 + + clickhouse-client --query " + CREATE MATERIALIZED VIEW 
system.${table}_watcher TO system.${table}_sender AS + SELECT ${EXTRA_COLUMNS_EXPRESSION_FOR_TABLE}, * + FROM system.${table} + " || continue + done +) + +function stop_logs_replication +{ + echo "Detach all logs replication" + clickhouse-client --query "select database||'.'||table from system.tables where database = 'system' and (table like '%_sender' or table like '%_watcher')" | { + tee /dev/stderr + } | { + timeout --preserve-status --signal TERM --kill-after 5m 15m xargs -n1 -r -i clickhouse-client --query "drop table {}" + } +} + + +while [[ "$#" -gt 0 ]]; do + case $1 in + --stop-log-replication) + echo "Stopping log replication..." + stop_logs_replication + ;; + --setup-logs-replication) + echo "Setting up log replication..." + setup_logs_replication + ;; + --config-logs-export-cluster) + echo "Configuring logs export for the cluster..." + config_logs_export_cluster "$2" + shift + ;; + *) + echo "Unknown option: $1" + echo "Usage: $0 [--stop-log-replication | --setup-logs-replication | --config-logs-export-cluster ]" + exit 1 + ;; + esac + shift +done \ No newline at end of file diff --git a/ci/praktika/artifact.py b/ci/praktika/artifact.py index ba05f18b9b1..1ae49e62259 100644 --- a/ci/praktika/artifact.py +++ b/ci/praktika/artifact.py @@ -1,3 +1,4 @@ +import copy from dataclasses import dataclass @@ -24,6 +25,14 @@ class Artifact: def is_s3_artifact(self): return self.type == Artifact.Type.S3 + def parametrize(self, names): + res = [] + for name in names: + obj = copy.deepcopy(self) + obj.name = name + res.append(obj) + return res + @classmethod def define_artifact(cls, name, type, path): return cls.Config(name=name, type=type, path=path) diff --git a/ci/praktika/hook_html.py b/ci/praktika/hook_html.py index ed2335a640a..5d476fe9d2b 100644 --- a/ci/praktika/hook_html.py +++ b/ci/praktika/hook_html.py @@ -128,6 +128,9 @@ class HtmlRunnerHooks: for job in _workflow.jobs: if job.name not in skip_jobs: result = Result.generate_pending(job.name) + # Preemptively add the general job log to the result directory to ensure + # the post-job handler can upload it, even if the job is terminated unexpectedly + result.set_files([Settings.RUN_LOG]) else: result = Result.generate_skipped(job.name, job_cache_records[job.name]) results.append(result) diff --git a/ci/praktika/native_jobs.py b/ci/praktika/native_jobs.py index cff6c851d0e..ce42b6a33fc 100644 --- a/ci/praktika/native_jobs.py +++ b/ci/praktika/native_jobs.py @@ -333,7 +333,7 @@ def _finish_workflow(workflow, job_name): # dump workflow result after update - to have an updated result in post workflow_result.dump() # add error into env - should apper in the report - env.add_info(ResultInfo.NOT_FINALIZED + f" [{result.name}]") + env.add_info(f"{result.name}: {ResultInfo.NOT_FINALIZED}") update_final_report = True job = workflow.get_job(result.name) if not job or not job.allow_merge_on_failure: diff --git a/ci/praktika/runner.py b/ci/praktika/runner.py index e88e00d7428..4ab7f8eeebc 100644 --- a/ci/praktika/runner.py +++ b/ci/praktika/runner.py @@ -1,3 +1,5 @@ +import glob +import json import os import re import sys @@ -255,9 +257,11 @@ class Runner: info = f"ERROR: {ResultInfo.KILLED}" print(info) result.set_info(info).set_status(Result.Status.ERROR).dump() + else: + # TODO: add setting with different ways of storing general praktika log: always, on error, never. 
+ # now let's store it on error only + result.files = [file for file in result.files if file != Settings.RUN_LOG] - if not result.is_ok(): - result.set_files(files=[Settings.RUN_LOG]) result.update_duration().dump() if run_exit_code == 0: diff --git a/ci/praktika/s3.py b/ci/praktika/s3.py index 6e8a0a6e8fe..8202d71b3d8 100644 --- a/ci/praktika/s3.py +++ b/ci/praktika/s3.py @@ -2,6 +2,7 @@ import dataclasses import json from pathlib import Path from typing import Dict +from urllib.parse import quote from praktika._environment import _Environment from praktika.settings import Settings @@ -55,7 +56,7 @@ class S3: bucket = s3_path.split("/")[0] endpoint = Settings.S3_BUCKET_TO_HTTP_ENDPOINT[bucket] assert endpoint - return f"https://{s3_full_path}".replace(bucket, endpoint) + return quote(f"https://{s3_full_path}".replace(bucket, endpoint), safe=":/?&=") @classmethod def put(cls, s3_path, local_path, text=False, metadata=None, if_none_matched=False): diff --git a/ci/jobs/__init__.py b/ci/settings/__init__.py similarity index 100% rename from ci/jobs/__init__.py rename to ci/settings/__init__.py diff --git a/ci/settings/definitions.py b/ci/settings/definitions.py index 8ebf79231ac..ced1289b950 100644 --- a/ci/settings/definitions.py +++ b/ci/settings/definitions.py @@ -242,3 +242,4 @@ class JobNames: BUILD = "Build" STATELESS = "Stateless tests" STATEFUL = "Stateful tests" + STRESS = "Stress tests" diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py index 63b16ead507..541d530b6c5 100644 --- a/ci/workflows/pull_request.py +++ b/ci/workflows/pull_request.py @@ -16,6 +16,11 @@ class ArtifactNames: CH_ARM_RELEASE = "CH_ARM_RELEASE" CH_ARM_ASAN = "CH_ARM_ASAN" + CH_ODBC_B_AMD_DEBUG = "CH_ODBC_B_AMD_DEBUG" + CH_ODBC_B_AMD_RELEASE = "CH_ODBC_B_AMD_RELEASE" + CH_ODBC_B_ARM_RELEASE = "CH_ODBC_B_ARM_RELEASE" + CH_ODBC_B_ARM_ASAN = "CH_ODBC_B_ARM_ASAN" + DEB_AMD_DEBUG = "DEB_AMD_DEBUG" DEB_AMD_RELEASE = "DEB_AMD_RELEASE" DEB_ARM_RELEASE = "DEB_ARM_RELEASE" @@ -68,10 +73,10 @@ build_jobs = Job.Config( ).parametrize( parameter=["amd_debug", "amd_release", "arm_release", "arm_asan"], provides=[ - [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.DEB_AMD_DEBUG], - [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.DEB_AMD_RELEASE], - [ArtifactNames.CH_ARM_RELEASE, ArtifactNames.DEB_ARM_RELEASE], - [ArtifactNames.CH_ARM_ASAN, ArtifactNames.DEB_ARM_ASAN], + [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.DEB_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG], + [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.DEB_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE], + [ArtifactNames.CH_ARM_RELEASE, ArtifactNames.DEB_ARM_RELEASE, ArtifactNames.CH_ODBC_B_ARM_RELEASE], + [ArtifactNames.CH_ARM_ASAN, ArtifactNames.DEB_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN], ], runs_on=[ [RunnerLabels.BUILDER_AMD], @@ -110,12 +115,12 @@ stateless_tests_jobs = Job.Config( [RunnerLabels.FUNC_TESTER_ARM], ], requires=[ - [ArtifactNames.CH_AMD_DEBUG], - [ArtifactNames.CH_AMD_DEBUG], - [ArtifactNames.CH_AMD_RELEASE], - [ArtifactNames.CH_AMD_RELEASE], - [ArtifactNames.CH_ARM_ASAN], - [ArtifactNames.CH_ARM_ASAN], + [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG], + [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG], + [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE], + [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE], + [ArtifactNames.CH_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN], + [ArtifactNames.CH_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN], ], ) @@ -133,7 +138,7 @@ 
stateful_tests_jobs = Job.Config( ), ).parametrize( parameter=[ - "amd_debug,parallel", + "amd_release,parallel", ], runs_on=[ [RunnerLabels.BUILDER_AMD], @@ -143,6 +148,29 @@ stateful_tests_jobs = Job.Config( ], ) +# TODO: refactor job to be aligned with praktika style (remove wrappers, run in docker) +stress_test_jobs = Job.Config( + name=JobNames.STRESS, + runs_on=[RunnerLabels.BUILDER_ARM], + command="python3 ./tests/ci/stress_check.py {PARAMETER}", + digest_config=Job.CacheDigestConfig( + include_paths=[ + "./ci/jobs/functional_stateful_tests.py", + ], + ), +).parametrize( + parameter=[ + "arm_release", + ], + runs_on=[ + [RunnerLabels.FUNC_TESTER_ARM], + ], + requires=[ + [ArtifactNames.DEB_ARM_RELEASE], + ], +) + + workflow = Workflow.Config( name="PR", event=Workflow.Event.PULL_REQUEST, @@ -153,28 +181,31 @@ workflow = Workflow.Config( *build_jobs, *stateless_tests_jobs, *stateful_tests_jobs, + *stress_test_jobs, ], artifacts=[ - Artifact.Config( - name=ArtifactNames.CH_AMD_DEBUG, + *Artifact.Config( + name="...", type=Artifact.Type.S3, path=f"{Settings.TEMP_DIR}/build/programs/clickhouse", - ), - Artifact.Config( - name=ArtifactNames.CH_AMD_RELEASE, + ).parametrize(names=[ + ArtifactNames.CH_AMD_DEBUG, + ArtifactNames.CH_AMD_RELEASE, + ArtifactNames.CH_ARM_RELEASE, + ArtifactNames.CH_ARM_ASAN, + ]), + + *Artifact.Config( + name="...", type=Artifact.Type.S3, - path=f"{Settings.TEMP_DIR}/build/programs/clickhouse", - ), - Artifact.Config( - name=ArtifactNames.CH_ARM_RELEASE, - type=Artifact.Type.S3, - path=f"{Settings.TEMP_DIR}/build/programs/clickhouse", - ), - Artifact.Config( - name=ArtifactNames.CH_ARM_ASAN, - type=Artifact.Type.S3, - path=f"{Settings.TEMP_DIR}/build/programs/clickhouse", - ), + path=f"{Settings.TEMP_DIR}/build/programs/clickhouse-odbc-bridge", + ).parametrize(names=[ + ArtifactNames.CH_ODBC_B_AMD_DEBUG, + ArtifactNames.CH_ODBC_B_AMD_RELEASE, + ArtifactNames.CH_ODBC_B_ARM_RELEASE, + ArtifactNames.CH_ODBC_B_ARM_ASAN, + ]), + Artifact.Config( name=ArtifactNames.DEB_AMD_DEBUG, type=Artifact.Type.S3, diff --git a/tests/ci/report.py b/tests/ci/report.py index 30b83b7409f..3e4f2ff2522 100644 --- a/tests/ci/report.py +++ b/tests/ci/report.py @@ -4,6 +4,7 @@ import datetime import json import logging import os +import sys from ast import literal_eval from dataclasses import asdict, dataclass from html import escape @@ -414,6 +415,42 @@ class JobReport: dummy: bool = False exit_code: int = -1 + def to_praktika_result(self, job_name): + sys.path.append("./ci") + + # ugly WA to exclude ci.py file form import + current_dir = os.path.dirname(os.path.abspath(__file__)) + if current_dir in sys.path: + sys.path.remove(current_dir) + from praktika.result import Result + + if self.start_time: + dt = datetime.datetime.strptime(self.start_time, "%Y-%m-%d %H:%M:%S") + timestamp = dt.timestamp() + else: + timestamp = None + + sub_results = [] + for r in self.test_results: + sub_results.append( + Result( + name=r.name, + status=r.status, + info=r.raw_logs, + links=list(r.log_urls) if r.log_urls else [], + duration=r.time, + ) + ) + + return Result( + name=job_name, + status=self.status, + start_time=timestamp, + duration=self.duration, + results=sub_results, + files=[f for f in self.additional_files], + ) + @staticmethod def get_start_time_from_current(): return datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") @@ -474,6 +511,7 @@ class JobReport: to_file = to_file or JOB_REPORT_FILE with open(to_file, "w", encoding="utf-8") as json_file: json.dump(asdict(self), json_file, 
default=path_converter, indent=2) + return self def read_test_results(results_path: Path, with_raw_logs: bool = True) -> TestResults: diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py index f9656e60448..c1d887ee536 100644 --- a/tests/ci/stress_check.py +++ b/tests/ci/stress_check.py @@ -15,6 +15,7 @@ from docker_images_helper import DockerImage, get_docker_image, pull_image from env_helper import REPO_COPY, REPORT_PATH, TEMP_PATH from get_robot_token import get_parameter_from_ssm from pr_info import PRInfo +from praktika.utils import Shell from report import ERROR, JobReport, TestResults, read_test_results from stopwatch import Stopwatch from tee_popen import TeePopen @@ -154,12 +155,19 @@ def run_stress_test(upgrade_check: bool = False) -> None: pr_info = PRInfo() - docker_image = pull_image(get_docker_image("clickhouse/stress-test")) + docker_image = pull_image(get_docker_image("clickhouse/stateful-test")) packages_path = temp_path / "packages" packages_path.mkdir(parents=True, exist_ok=True) - download_all_deb_packages(check_name, reports_path, packages_path) + if check_name in ("amd_release", "amd_debug", "arm_release"): + # this is praktika based CI + print("Copy input *.deb artifacts") + assert Shell.check( + f"cp /tmp/praktika/input/*.deb {packages_path}", verbose=True + ) + else: + download_all_deb_packages(check_name, reports_path, packages_path) server_log_path = temp_path / "server_log" server_log_path.mkdir(parents=True, exist_ok=True) @@ -201,6 +209,7 @@ def run_stress_test(upgrade_check: bool = False) -> None: result_path, server_log_path, run_log_path ) + Shell.check("pwd", verbose=True) JobReport( description=description, test_results=test_results, @@ -208,7 +217,7 @@ def run_stress_test(upgrade_check: bool = False) -> None: start_time=stopwatch.start_time_str, duration=stopwatch.duration_seconds, additional_files=additional_logs, - ).dump() + ).dump().to_praktika_result(job_name=f"Stress tests ({check_name})").dump() if state == "failure": sys.exit(1) From b2c81981fbc5746aeccece8cf288167b5ad18df4 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Tue, 15 Oct 2024 18:24:06 +0200 Subject: [PATCH 05/31] Use clang-19 --- .clang-tidy | 8 +++- base/base/preciseExp10.cpp | 2 - cmake/clang_tidy.cmake | 4 +- docker/packager/README.md | 8 ++-- docker/packager/packager | 26 +++++------ docker/test/fasttest/Dockerfile | 2 +- docker/test/fasttest/requirements.txt | 2 +- docker/test/fuzzer/requirements.txt | 2 +- docker/test/fuzzer/run-fuzzer.sh | 2 +- docker/test/integration/base/requirements.txt | 2 +- docker/test/keeper-jepsen/run.sh | 2 +- docker/test/libfuzzer/requirements.txt | 2 +- .../performance-comparison/requirements.txt | 2 +- docker/test/server-jepsen/run.sh | 2 +- docker/test/sqllogic/requirements.txt | 2 +- docker/test/sqltest/requirements.txt | 2 +- docker/test/sqltest/run.sh | 2 +- docker/test/stateless/requirements.txt | 2 +- docker/test/util/Dockerfile | 2 +- docs/en/development/build-cross-loongarch.md | 6 +-- docs/en/development/build-cross-osx.md | 6 +-- docs/en/development/build-cross-riscv.md | 4 +- docs/en/development/build.md | 6 +-- docs/en/development/continuous-integration.md | 4 +- docs/en/development/developer-instruction.md | 2 +- .../disks/CommandGetCurrentDiskAndPath.cpp | 2 +- programs/disks/CommandLink.cpp | 2 +- programs/disks/CommandList.cpp | 2 +- programs/disks/CommandMkDir.cpp | 2 +- programs/disks/CommandRead.cpp | 2 +- programs/disks/CommandRemove.cpp | 2 +- programs/disks/CommandTouch.cpp | 2 +- programs/disks/DisksApp.cpp 
| 2 +- programs/git-import/git-import.cpp | 24 +++++----- programs/keeper/keeper_main.cpp | 4 +- programs/obfuscator/Obfuscator.cpp | 4 +- .../static-files-disk-uploader.cpp | 4 +- programs/su/su.cpp | 2 +- src/Access/AccessEntityIO.cpp | 2 +- src/Access/AuthenticationData.cpp | 4 +- src/Access/ExternalAuthenticators.cpp | 2 +- src/Access/SettingsProfileElement.cpp | 2 +- .../AggregateFunctionDistinctDynamicTypes.cpp | 2 +- .../AggregateFunctionDistinctJSONPaths.cpp | 2 +- .../AggregateFunctionFlameGraph.cpp | 4 +- .../AggregateFunctionGroupArray.cpp | 4 +- .../AggregateFunctionSequenceNextNode.cpp | 2 +- src/AggregateFunctions/IAggregateFunction.h | 2 +- src/AggregateFunctions/ReservoirSampler.h | 2 +- src/Client/LineReader.cpp | 4 +- src/Client/TerminalKeystrokeInterceptor.cpp | 2 +- src/Columns/ColumnObject.cpp | 2 +- src/Columns/IColumn.h | 9 ++++ src/Common/AsyncLoader.cpp | 4 +- src/Common/COW.h | 10 +++++ src/Common/ColumnsHashingImpl.h | 2 +- src/Common/DNSResolver.cpp | 2 +- src/Common/DateLUTImpl.cpp | 2 +- src/Common/Dwarf.cpp | 24 +++++----- src/Common/ErrorCodes.cpp | 2 +- src/Common/Exception.cpp | 2 +- src/Common/HTTPConnectionPool.cpp | 10 ++--- src/Common/HashTable/HashTable.h | 2 +- src/Common/HashTable/HashTableKeyHolder.h | 2 +- src/Common/IFactoryWithAliases.h | 2 +- src/Common/JSONParsers/SimdJSONParser.h | 2 +- src/Common/OpenTelemetryTraceContext.cpp | 2 +- src/Common/PageCache.cpp | 4 +- src/Common/ProfileEvents.cpp | 2 +- src/Common/QueryFuzzer.cpp | 8 ++-- src/Common/QueryProfiler.h | 4 +- src/Common/SharedMutexHelper.h | 5 +++ src/Common/ShellCommand.cpp | 4 +- src/Common/StackTrace.cpp | 14 +++--- src/Common/StringUtils.cpp | 6 +-- src/Common/TargetSpecific.cpp | 2 +- src/Common/TimerDescriptor.cpp | 2 +- src/Common/TypePromotion.h | 5 +++ src/Common/Visitor.h | 2 +- src/Common/filesystemHelpers.cpp | 2 +- src/Common/formatIPv6.cpp | 4 +- src/Common/iota.cpp | 4 +- src/Compression/CompressionCodecMultiple.cpp | 3 ++ src/Coordination/Changelog.cpp | 2 +- src/Coordination/KeeperStorage.cpp | 2 +- src/Core/BaseSettingsProgramOptions.h | 4 +- src/Core/Settings.cpp | 2 +- src/Daemon/SentryWriter.cpp | 2 +- src/DataTypes/DataTypeArray.cpp | 2 +- src/DataTypes/DataTypeDynamic.cpp | 2 +- src/DataTypes/DataTypeObject.cpp | 2 +- .../SerializationIPv4andIPv6.cpp | 4 +- src/Databases/DDLRenamingVisitor.cpp | 2 +- src/Databases/DDLRenamingVisitor.h | 2 +- .../ObjectStorages/DiskObjectStorage.cpp | 2 +- src/Functions/FunctionHelpers.cpp | 2 +- src/Functions/FunctionsEmbeddedDictionaries.h | 2 +- src/Functions/FunctionsExternalDictionaries.h | 4 +- src/Functions/GatherUtils/IArraySink.h | 2 +- src/Functions/GatherUtils/IArraySource.h | 2 +- src/Functions/GatherUtils/IValueSource.h | 2 +- src/Functions/URL/domain.h | 2 +- src/Functions/URL/topLevelDomain.cpp | 2 +- src/Functions/extractAllGroups.h | 4 +- src/Functions/isIPAddressContainedIn.cpp | 2 +- src/IO/BufferWithOwnMemory.h | 2 +- src/IO/readDecimalText.h | 2 +- src/Interpreters/Aggregator.cpp | 1 + src/Interpreters/Cluster.cpp | 4 +- src/Interpreters/DDLWorker.cpp | 2 +- src/Interpreters/EmbeddedDictionaries.cpp | 8 ++-- src/Interpreters/HashJoin/HashJoin.cpp | 1 + src/Parsers/ASTBackupQuery.cpp | 2 +- .../Access/ParserCreateRowPolicyQuery.cpp | 2 +- src/Planner/PlannerActionsVisitor.cpp | 2 +- src/Processors/Chunk.h | 5 ++- src/Processors/Executors/PollingQueue.cpp | 4 +- src/Processors/Merges/Algorithms/Graphite.cpp | 2 +- .../optimizeUseAggregateProjection.cpp | 4 +- src/Storages/MaterializedView/RefreshTask.cpp | 2 
+- src/Storages/MergeTree/IMergeTreeDataPart.cpp | 2 +- .../MergeTree/MergeTreeMarksLoader.cpp | 2 +- src/Storages/MergeTree/MergeTreeSettings.cpp | 2 +- .../MergeTreeSplitPrewhereIntoReadSteps.cpp | 2 +- .../ParallelReplicasReadingCoordinator.cpp | 1 + .../ReplicatedMergeTreeCleanupThread.cpp | 1 + .../MergeTree/ReplicatedMergeTreeSink.cpp | 1 + .../DataLakes/IcebergMetadata.cpp | 2 +- src/Storages/StorageFactory.cpp | 2 +- src/Storages/StorageMergeTreeIndex.cpp | 2 +- src/Storages/VirtualColumnUtils.cpp | 4 +- tests/ci/ci_config.py | 44 +++++++++---------- tests/docker_scripts/fasttest_runner.sh | 2 +- 133 files changed, 276 insertions(+), 228 deletions(-) diff --git a/.clang-tidy b/.clang-tidy index bb63bf2eea6..2a2d416179d 100644 --- a/.clang-tidy +++ b/.clang-tidy @@ -16,6 +16,9 @@ Checks: [ '-android-*', + '-boost-use-ranges', + '-modernize-use-ranges', + '-bugprone-assignment-in-if-condition', '-bugprone-branch-clone', '-bugprone-easily-swappable-parameters', @@ -28,7 +31,6 @@ Checks: [ '-bugprone-reserved-identifier', # useful but too slow, TODO retry when https://reviews.llvm.org/rG1c282052624f9d0bd273bde0b47b30c96699c6c7 is merged '-bugprone-unchecked-optional-access', '-bugprone-crtp-constructor-accessibility', - '-bugprone-suspicious-stringview-data-usage', '-cert-dcl16-c', '-cert-dcl37-c', @@ -42,6 +44,8 @@ Checks: [ '-clang-analyzer-optin.performance.Padding', + '-clang-analyzer-cplusplus.PlacementNew', + '-clang-analyzer-unix.Malloc', '-cppcoreguidelines-*', # impractical in a codebase as large as ClickHouse, also slow @@ -90,6 +94,7 @@ Checks: [ '-misc-non-private-member-variables-in-classes', '-misc-confusable-identifiers', # useful but slooo '-misc-use-anonymous-namespace', + '-misc-use-internal-linkage', '-modernize-avoid-c-arrays', '-modernize-concat-nested-namespaces', @@ -137,6 +142,7 @@ Checks: [ '-readability-suspicious-call-argument', '-readability-uppercase-literal-suffix', '-readability-use-anyofallof', + '-readability-math-missing-parentheses', '-zircon-*' ] diff --git a/base/base/preciseExp10.cpp b/base/base/preciseExp10.cpp index 1cd660dc569..5e3e7fb6983 100644 --- a/base/base/preciseExp10.cpp +++ b/base/base/preciseExp10.cpp @@ -30,8 +30,6 @@ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #include -#include -#include double preciseExp10(double x) { diff --git a/cmake/clang_tidy.cmake b/cmake/clang_tidy.cmake index 4c9331f6283..24ec71e7a05 100644 --- a/cmake/clang_tidy.cmake +++ b/cmake/clang_tidy.cmake @@ -5,14 +5,14 @@ if (ENABLE_CLANG_TIDY) find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache") if (CLANG_TIDY_CACHE_PATH) - find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-18" "clang-tidy-17" "clang-tidy-16" "clang-tidy") + find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-19" "clang-tidy-18" "clang-tidy-17" "clang-tidy") # Why do we use ';' here? # It's a cmake black magic: https://cmake.org/cmake/help/latest/prop_tgt/LANG_CLANG_TIDY.html#prop_tgt:%3CLANG%3E_CLANG_TIDY # The CLANG_TIDY_PATH is passed to CMAKE_CXX_CLANG_TIDY, which follows CXX_CLANG_TIDY syntax. 
set (CLANG_TIDY_PATH "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_PATH}" CACHE STRING "A combined command to run clang-tidy with caching wrapper") else () - find_program (CLANG_TIDY_PATH NAMES "clang-tidy-18" "clang-tidy-17" "clang-tidy-16" "clang-tidy") + find_program (CLANG_TIDY_PATH NAMES "clang-tidy-19" "clang-tidy-18" "clang-tidy-17" "clang-tidy") endif () if (CLANG_TIDY_PATH) diff --git a/docker/packager/README.md b/docker/packager/README.md index 12947aed62f..f41da99bcfe 100644 --- a/docker/packager/README.md +++ b/docker/packager/README.md @@ -3,10 +3,10 @@ compilers and build settings. Correctly configured Docker daemon is single depen Usage: -Build deb package with `clang-18` in `debug` mode: +Build deb package with `clang-19` in `debug` mode: ``` $ mkdir deb/test_output -$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=clang-18 --debug-build +$ ./packager --output-dir deb/test_output/ --package-type deb --compiler=clang-19 --debug-build $ ls -l deb/test_output -rw-r--r-- 1 root root 3730 clickhouse-client_22.2.2+debug_all.deb -rw-r--r-- 1 root root 84221888 clickhouse-common-static_22.2.2+debug_amd64.deb @@ -17,11 +17,11 @@ $ ls -l deb/test_output ``` -Build ClickHouse binary with `clang-18` and `address` sanitizer in `relwithdebuginfo` +Build ClickHouse binary with `clang-19` and `address` sanitizer in `relwithdebuginfo` mode: ``` $ mkdir $HOME/some_clickhouse -$ ./packager --output-dir=$HOME/some_clickhouse --package-type binary --compiler=clang-18 --sanitizer=address +$ ./packager --output-dir=$HOME/some_clickhouse --package-type binary --compiler=clang-19 --sanitizer=address $ ls -l $HOME/some_clickhouse -rwxr-xr-x 1 root root 787061952 clickhouse lrwxrwxrwx 1 root root 10 clickhouse-benchmark -> clickhouse diff --git a/docker/packager/packager b/docker/packager/packager index da4af7fc1be..e7c0f4b3a00 100755 --- a/docker/packager/packager +++ b/docker/packager/packager @@ -407,20 +407,20 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--compiler", choices=( - "clang-18", - "clang-18-darwin", - "clang-18-darwin-aarch64", - "clang-18-aarch64", - "clang-18-aarch64-v80compat", - "clang-18-ppc64le", - "clang-18-riscv64", - "clang-18-s390x", - "clang-18-loongarch64", - "clang-18-amd64-compat", - "clang-18-amd64-musl", - "clang-18-freebsd", + "clang-19", + "clang-19-darwin", + "clang-19-darwin-aarch64", + "clang-19-aarch64", + "clang-19-aarch64-v80compat", + "clang-19-ppc64le", + "clang-19-riscv64", + "clang-19-s390x", + "clang-19-loongarch64", + "clang-19-amd64-compat", + "clang-19-amd64-musl", + "clang-19-freebsd", ), - default="clang-18", + default="clang-19", help="a compiler to use", ) parser.add_argument( diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile index ca93b24f66e..703ab54c242 100644 --- a/docker/test/fasttest/Dockerfile +++ b/docker/test/fasttest/Dockerfile @@ -42,7 +42,7 @@ RUN sed -i '/_IMPORT_CHECK_FILES_FOR_\(mlir-\|llvm-bolt\|merge-fdata\|MLIR\)/ {s # LLVM changes paths for compiler-rt libraries. For some reason clang-18.1.8 cannot catch up libraries from default install path. # It's very dirty workaround, better to build compiler and LLVM ourself and use it. Details: https://github.com/llvm/llvm-project/issues/95792 -RUN test ! -d /usr/lib/llvm-18/lib/clang/18/lib/x86_64-pc-linux-gnu || ln -s /usr/lib/llvm-18/lib/clang/18/lib/x86_64-pc-linux-gnu /usr/lib/llvm-18/lib/clang/18/lib/x86_64-unknown-linux-gnu +RUN test ! 
-d /usr/lib/llvm-19/lib/clang/19/lib/x86_64-pc-linux-gnu || ln -s /usr/lib/llvm-19/lib/clang/19/lib/x86_64-pc-linux-gnu /usr/lib/llvm-19/lib/clang/19/lib/x86_64-unknown-linux-gnu ARG CCACHE_VERSION=4.6.1 RUN mkdir /tmp/ccache \ diff --git a/docker/test/fasttest/requirements.txt b/docker/test/fasttest/requirements.txt index 993ea22e5ae..6fbcf7cccd4 100644 --- a/docker/test/fasttest/requirements.txt +++ b/docker/test/fasttest/requirements.txt @@ -27,7 +27,7 @@ pandas==1.5.3 pip==24.1.1 pipdeptree==2.23.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 python-dateutil==2.9.0.post0 pytz==2024.1 requests==2.32.3 diff --git a/docker/test/fuzzer/requirements.txt b/docker/test/fuzzer/requirements.txt index 3dce93e023b..e51da509b27 100644 --- a/docker/test/fuzzer/requirements.txt +++ b/docker/test/fuzzer/requirements.txt @@ -18,7 +18,7 @@ pip==24.1.1 pipdeptree==2.23.0 PyJWT==2.3.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/fuzzer/run-fuzzer.sh b/docker/test/fuzzer/run-fuzzer.sh index ae1b9e94bed..fd3efe4e174 100755 --- a/docker/test/fuzzer/run-fuzzer.sh +++ b/docker/test/fuzzer/run-fuzzer.sh @@ -17,7 +17,7 @@ stage=${stage:-} script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" echo "$script_dir" repo_dir=ch -BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-18_debug_none_unsplitted_disable_False_binary"} +BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-19_debug_none_unsplitted_disable_False_binary"} BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"} function git_clone_with_retry diff --git a/docker/test/integration/base/requirements.txt b/docker/test/integration/base/requirements.txt index d195d8deaf6..a8ef35cd13f 100644 --- a/docker/test/integration/base/requirements.txt +++ b/docker/test/integration/base/requirements.txt @@ -17,7 +17,7 @@ pipdeptree==2.23.0 pycurl==7.45.3 PyJWT==2.3.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/keeper-jepsen/run.sh b/docker/test/keeper-jepsen/run.sh index 444f3cd0de7..a36fa5ca0a1 100644 --- a/docker/test/keeper-jepsen/run.sh +++ b/docker/test/keeper-jepsen/run.sh @@ -2,7 +2,7 @@ set -euo pipefail -CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-18_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"} +CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-19_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"} CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""} diff --git a/docker/test/libfuzzer/requirements.txt b/docker/test/libfuzzer/requirements.txt index 3dce93e023b..e51da509b27 100644 --- a/docker/test/libfuzzer/requirements.txt +++ b/docker/test/libfuzzer/requirements.txt @@ -18,7 +18,7 @@ pip==24.1.1 pipdeptree==2.23.0 PyJWT==2.3.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/performance-comparison/requirements.txt b/docker/test/performance-comparison/requirements.txt index 932527cc022..2db604d6829 100644 --- a/docker/test/performance-comparison/requirements.txt +++ 
b/docker/test/performance-comparison/requirements.txt @@ -19,7 +19,7 @@ pipdeptree==2.23.0 Pygments==2.11.2 PyJWT==2.3.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 pytz==2023.4 PyYAML==6.0.1 scipy==1.12.0 diff --git a/docker/test/server-jepsen/run.sh b/docker/test/server-jepsen/run.sh index 0d3372b43be..07048686048 100644 --- a/docker/test/server-jepsen/run.sh +++ b/docker/test/server-jepsen/run.sh @@ -2,7 +2,7 @@ set -euo pipefail -CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-18_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"} +CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-19_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"} CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""} diff --git a/docker/test/sqllogic/requirements.txt b/docker/test/sqllogic/requirements.txt index abc0a368659..d5091aaa01b 100644 --- a/docker/test/sqllogic/requirements.txt +++ b/docker/test/sqllogic/requirements.txt @@ -20,7 +20,7 @@ pipdeptree==2.23.0 PyJWT==2.3.0 pyodbc==5.1.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/sqltest/requirements.txt b/docker/test/sqltest/requirements.txt index 4a0ae3edbac..e2fe5b34463 100644 --- a/docker/test/sqltest/requirements.txt +++ b/docker/test/sqltest/requirements.txt @@ -17,7 +17,7 @@ pip==24.1.1 pipdeptree==2.23.0 PyJWT==2.3.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 pytz==2024.1 PyYAML==6.0.1 SecretStorage==3.3.1 diff --git a/docker/test/sqltest/run.sh b/docker/test/sqltest/run.sh index 7edc1341d7d..175e8f2aa49 100755 --- a/docker/test/sqltest/run.sh +++ b/docker/test/sqltest/run.sh @@ -6,7 +6,7 @@ set -e set -u set -o pipefail -BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-18_debug_none_unsplitted_disable_False_binary"} +BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-19_debug_none_unsplitted_disable_False_binary"} BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"} function wget_with_retry diff --git a/docker/test/stateless/requirements.txt b/docker/test/stateless/requirements.txt index 74860d5fec3..af653835614 100644 --- a/docker/test/stateless/requirements.txt +++ b/docker/test/stateless/requirements.txt @@ -34,7 +34,7 @@ pyarrow==15.0.0 pyasn1==0.4.8 PyJWT==2.3.0 pyparsing==2.4.7 -python-apt==2.4.0+ubuntu3 +# python-apt==2.4.0+ubuntu3 python-dateutil==2.8.1 pytz==2024.1 PyYAML==6.0.1 diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 6b9fb94a4c6..fe762060710 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -5,7 +5,7 @@ FROM ubuntu:22.04 ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=18 +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 RUN apt-get update \ && apt-get install \ diff --git a/docs/en/development/build-cross-loongarch.md b/docs/en/development/build-cross-loongarch.md index 9ffe97d3da7..b7740b5f62a 100644 --- a/docs/en/development/build-cross-loongarch.md +++ b/docs/en/development/build-cross-loongarch.md @@ -11,7 +11,7 @@ This is for the case when you have Linux machine and want 
to use it to build `cl The cross-build for LoongArch64 is based on the [Build instructions](../development/build.md), follow them first. -## Install Clang-18 +## Install Clang-19 Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup or do ``` @@ -21,11 +21,11 @@ sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ## Build ClickHouse {#build-clickhouse} -The llvm version required for building must be greater than or equal to 18.1.0. +The llvm version required for building must be greater than or equal to 19.1.0. ``` bash cd ClickHouse mkdir build-loongarch64 -CC=clang-18 CXX=clang++-18 cmake . -Bbuild-loongarch64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-loongarch64.cmake +CC=clang-19 CXX=clang++-19 cmake . -Bbuild-loongarch64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-loongarch64.cmake ninja -C build-loongarch64 ``` diff --git a/docs/en/development/build-cross-osx.md b/docs/en/development/build-cross-osx.md index 66c6e2c6912..0491b9dfbe9 100644 --- a/docs/en/development/build-cross-osx.md +++ b/docs/en/development/build-cross-osx.md @@ -13,14 +13,14 @@ The cross-build for macOS is based on the [Build instructions](../development/bu The following sections provide a walk-through for building ClickHouse for `x86_64` macOS. If you’re targeting ARM architecture, simply substitute all occurrences of `x86_64` with `aarch64`. For example, replace `x86_64-apple-darwin` with `aarch64-apple-darwin` throughout the steps. -## Install clang-18 +## Install clang-19 Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup. For example the commands for Bionic are like: ``` bash sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-17 main" >> /etc/apt/sources.list -sudo apt-get install clang-18 +sudo apt-get install clang-19 ``` ## Install Cross-Compilation Toolset {#install-cross-compilation-toolset} @@ -59,7 +59,7 @@ curl -L 'https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11 cd ClickHouse mkdir build-darwin cd build-darwin -CC=clang-18 CXX=clang++-18 cmake -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ar -DCMAKE_INSTALL_NAME_TOOL=${CCTOOLS}/bin/x86_64-apple-darwin-install_name_tool -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ranlib -DLINKER_NAME=${CCTOOLS}/bin/x86_64-apple-darwin-ld -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake .. +CC=clang-19 CXX=clang++-19 cmake -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ar -DCMAKE_INSTALL_NAME_TOOL=${CCTOOLS}/bin/x86_64-apple-darwin-install_name_tool -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ranlib -DLINKER_NAME=${CCTOOLS}/bin/x86_64-apple-darwin-ld -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake .. ninja ``` diff --git a/docs/en/development/build-cross-riscv.md b/docs/en/development/build-cross-riscv.md index dd97b6081e8..b37ef417eb4 100644 --- a/docs/en/development/build-cross-riscv.md +++ b/docs/en/development/build-cross-riscv.md @@ -11,7 +11,7 @@ This is for the case when you have Linux machine and want to use it to build `cl The cross-build for RISC-V 64 is based on the [Build instructions](../development/build.md), follow them first. -## Install Clang-18 +## Install Clang-19 Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup or do ``` @@ -23,7 +23,7 @@ sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ``` bash cd ClickHouse mkdir build-riscv64 -CC=clang-18 CXX=clang++-18 cmake . 
-Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF +CC=clang-19 CXX=clang++-19 cmake . -Bbuild-riscv64 -G Ninja -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON -DENABLE_PARQUET=OFF -DENABLE_GRPC=OFF -DENABLE_HDFS=OFF -DENABLE_MYSQL=OFF ninja -C build-riscv64 ``` diff --git a/docs/en/development/build.md b/docs/en/development/build.md index 227a4d62484..d9fcdec671a 100644 --- a/docs/en/development/build.md +++ b/docs/en/development/build.md @@ -54,8 +54,8 @@ to see what version you have installed before setting this environment variable. ::: ``` bash -export CC=clang-18 -export CXX=clang++-18 +export CC=clang-19 +export CXX=clang++-19 ``` ### Install Rust compiler @@ -109,7 +109,7 @@ The build requires the following components: - Git (used to checkout the sources, not needed for the build) - CMake 3.20 or newer -- Compiler: clang-18 or newer +- Compiler: clang-19 or newer - Linker: lld-17 or newer - Ninja - Yasm diff --git a/docs/en/development/continuous-integration.md b/docs/en/development/continuous-integration.md index ef487d16d73..711f4baafd5 100644 --- a/docs/en/development/continuous-integration.md +++ b/docs/en/development/continuous-integration.md @@ -156,7 +156,7 @@ Builds ClickHouse in various configurations for use in further steps. You have t ### Report Details -- **Compiler**: `clang-18`, optionally with the name of a target platform +- **Compiler**: `clang-19`, optionally with the name of a target platform - **Build type**: `Debug` or `RelWithDebInfo` (cmake). - **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan). - **Status**: `success` or `fail` @@ -180,7 +180,7 @@ Performs static analysis and code style checks using `clang-tidy`. The report is There is a convenience `packager` script that runs the clang-tidy build in docker ```sh mkdir build_tidy -./docker/packager/packager --output-dir=./build_tidy --package-type=binary --compiler=clang-18 --debug-build --clang-tidy +./docker/packager/packager --output-dir=./build_tidy --package-type=binary --compiler=clang-19 --debug-build --clang-tidy ``` diff --git a/docs/en/development/developer-instruction.md b/docs/en/development/developer-instruction.md index 64b6cd355ee..3d7eab24c2e 100644 --- a/docs/en/development/developer-instruction.md +++ b/docs/en/development/developer-instruction.md @@ -121,7 +121,7 @@ While inside the `build` directory, configure your build by running CMake. Befor export CC=clang CXX=clang++ cmake .. -If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-18 CXX=clang++-18`. The clang version will be in the script output. +If you installed clang using the automatic installation script above, also specify the version of clang installed in the first command, e.g. `export CC=clang-19 CXX=clang++-19`. The clang version will be in the script output. The `CC` variable specifies the compiler for C (short for C Compiler), and `CXX` variable instructs which C++ compiler is to be used for building. 
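For context, the compiler bump in the documentation changes above boils down to the following native build flow. This is a sketch, not a verbatim excerpt of any one page; it assumes clang-19 and Ninja are already installed (for example via the llvm.sh script the docs reference):

```bash
# Sketch: configure and build with the compiler version the docs now reference.
export CC=clang-19
export CXX=clang++-19
mkdir -p build && cd build
cmake -G Ninja ..
ninja clickhouse-server clickhouse-client   # or plain `ninja` to build everything
```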
diff --git a/programs/disks/CommandGetCurrentDiskAndPath.cpp b/programs/disks/CommandGetCurrentDiskAndPath.cpp index 15f8ef5aae8..7836723045d 100644 --- a/programs/disks/CommandGetCurrentDiskAndPath.cpp +++ b/programs/disks/CommandGetCurrentDiskAndPath.cpp @@ -18,7 +18,7 @@ public: void executeImpl(const CommandLineOptions &, DisksClient & client) override { - auto disk = client.getCurrentDiskWithPath(); + const auto & disk = client.getCurrentDiskWithPath(); std::cout << "Disk: " << disk.getDisk()->getName() << "\nPath: " << disk.getCurrentPath() << std::endl; } }; diff --git a/programs/disks/CommandLink.cpp b/programs/disks/CommandLink.cpp index 11c196cafc5..e95137437df 100644 --- a/programs/disks/CommandLink.cpp +++ b/programs/disks/CommandLink.cpp @@ -20,7 +20,7 @@ public: void executeImpl(const CommandLineOptions & options, DisksClient & client) override { - auto disk = client.getCurrentDiskWithPath(); + const auto & disk = client.getCurrentDiskWithPath(); const String & path_from = disk.getRelativeFromRoot(getValueFromCommandLineOptionsThrow(options, "path-from")); const String & path_to = disk.getRelativeFromRoot(getValueFromCommandLineOptionsThrow(options, "path-to")); diff --git a/programs/disks/CommandList.cpp b/programs/disks/CommandList.cpp index 00ac5c5143b..9c8c8101938 100644 --- a/programs/disks/CommandList.cpp +++ b/programs/disks/CommandList.cpp @@ -23,7 +23,7 @@ public: { bool recursive = options.count("recursive"); bool show_hidden = options.count("all"); - auto disk = client.getCurrentDiskWithPath(); + const auto & disk = client.getCurrentDiskWithPath(); String path = getValueFromCommandLineOptionsWithDefault(options, "path", "."); if (recursive) diff --git a/programs/disks/CommandMkDir.cpp b/programs/disks/CommandMkDir.cpp index c6222f326d4..277e81cbf13 100644 --- a/programs/disks/CommandMkDir.cpp +++ b/programs/disks/CommandMkDir.cpp @@ -21,7 +21,7 @@ public: void executeImpl(const CommandLineOptions & options, DisksClient & client) override { bool recursive = options.count("parents"); - auto disk = client.getCurrentDiskWithPath(); + const auto & disk = client.getCurrentDiskWithPath(); String path = disk.getRelativeFromRoot(getValueFromCommandLineOptionsThrow(options, "path")); diff --git a/programs/disks/CommandRead.cpp b/programs/disks/CommandRead.cpp index f4504504752..69445fc09eb 100644 --- a/programs/disks/CommandRead.cpp +++ b/programs/disks/CommandRead.cpp @@ -22,7 +22,7 @@ public: void executeImpl(const CommandLineOptions & options, DisksClient & client) override { - auto disk = client.getCurrentDiskWithPath(); + const auto & disk = client.getCurrentDiskWithPath(); String path_from = disk.getRelativeFromRoot(getValueFromCommandLineOptionsThrow(options, "path-from")); std::optional path_to = getValueFromCommandLineOptionsWithOptional(options, "path-to"); diff --git a/programs/disks/CommandRemove.cpp b/programs/disks/CommandRemove.cpp index e6a85d4233d..2f8f6be8614 100644 --- a/programs/disks/CommandRemove.cpp +++ b/programs/disks/CommandRemove.cpp @@ -25,7 +25,7 @@ public: void executeImpl(const CommandLineOptions & options, DisksClient & client) override { - auto disk = client.getCurrentDiskWithPath(); + const auto & disk = client.getCurrentDiskWithPath(); const String & path = disk.getRelativeFromRoot(getValueFromCommandLineOptionsThrow(options, "path")); bool recursive = options.count("recursive"); if (disk.getDisk()->existsDirectory(path)) diff --git a/programs/disks/CommandTouch.cpp b/programs/disks/CommandTouch.cpp index c0bdb64cf9e..28f3e07aad4 100644 --- 
a/programs/disks/CommandTouch.cpp +++ b/programs/disks/CommandTouch.cpp @@ -20,7 +20,7 @@ public: void executeImpl(const CommandLineOptions & options, DisksClient & client) override { - auto disk = client.getCurrentDiskWithPath(); + const auto & disk = client.getCurrentDiskWithPath(); String path = getValueFromCommandLineOptionsThrow(options, "path"); disk.getDisk()->createFile(disk.getRelativeFromRoot(path)); diff --git a/programs/disks/DisksApp.cpp b/programs/disks/DisksApp.cpp index d6541e99288..ff0e0f52b8e 100644 --- a/programs/disks/DisksApp.cpp +++ b/programs/disks/DisksApp.cpp @@ -129,7 +129,7 @@ std::vector DisksApp::getCompletions(const String & prefix) const } if (arguments.size() == 1) { - String command_prefix = arguments[0]; + const String & command_prefix = arguments[0]; return getCommandsToComplete(command_prefix); } diff --git a/programs/git-import/git-import.cpp b/programs/git-import/git-import.cpp index 0a1a2e99dbc..b3caa0105d9 100644 --- a/programs/git-import/git-import.cpp +++ b/programs/git-import/git-import.cpp @@ -243,7 +243,7 @@ enum class FileChangeType : uint8_t Type, }; -void writeText(FileChangeType type, WriteBuffer & out) +static void writeText(FileChangeType type, WriteBuffer & out) { switch (type) { @@ -299,7 +299,7 @@ enum class LineType : uint8_t Code, }; -void writeText(LineType type, WriteBuffer & out) +static void writeText(LineType type, WriteBuffer & out) { switch (type) { @@ -429,7 +429,7 @@ using CommitDiff = std::map; /** Parsing helpers */ -void skipUntilWhitespace(ReadBuffer & buf) +static void skipUntilWhitespace(ReadBuffer & buf) { while (!buf.eof()) { @@ -444,7 +444,7 @@ void skipUntilWhitespace(ReadBuffer & buf) } } -void skipUntilNextLine(ReadBuffer & buf) +static void skipUntilNextLine(ReadBuffer & buf) { while (!buf.eof()) { @@ -462,7 +462,7 @@ void skipUntilNextLine(ReadBuffer & buf) } } -void readStringUntilNextLine(std::string & s, ReadBuffer & buf) +static void readStringUntilNextLine(std::string & s, ReadBuffer & buf) { s.clear(); while (!buf.eof()) @@ -680,7 +680,7 @@ using Snapshot = std::map; * - the author, time and commit of the previous change to every found line (blame). * And update the snapshot. */ -void updateSnapshot(Snapshot & snapshot, const Commit & commit, CommitDiff & file_changes) +static void updateSnapshot(Snapshot & snapshot, const Commit & commit, CommitDiff & file_changes) { /// Renames and copies. for (auto & elem : file_changes) @@ -755,7 +755,7 @@ void updateSnapshot(Snapshot & snapshot, const Commit & commit, CommitDiff & fil */ using DiffHashes = std::unordered_set; -UInt128 diffHash(const CommitDiff & file_changes) +static UInt128 diffHash(const CommitDiff & file_changes) { SipHash hasher; @@ -791,7 +791,7 @@ UInt128 diffHash(const CommitDiff & file_changes) * :100644 100644 828dedf6b5 828dedf6b5 R100 dbms/src/Functions/GeoUtils.h dbms/src/Functions/PolygonUtils.h * according to the output of 'git show --raw' */ -void processFileChanges( +static void processFileChanges( ReadBuffer & in, const Options & options, Commit & commit, @@ -883,7 +883,7 @@ void processFileChanges( * - we expect some specific format of the diff; but it may actually depend on git config; * - non-ASCII file names are not processed correctly (they will not be found and will be ignored). */ -void processDiffs( +static void processDiffs( ReadBuffer & in, std::optional size_limit, Commit & commit, @@ -1055,7 +1055,7 @@ void processDiffs( /** Process the "git show" result for a single commit. Append the result to tables. 
*/ -void processCommit( +static void processCommit( ReadBuffer & in, const Options & options, size_t commit_num, @@ -1123,7 +1123,7 @@ void processCommit( /** Runs child process and allows to read the result. * Multiple processes can be run for parallel processing. */ -auto gitShow(const std::string & hash) +static auto gitShow(const std::string & hash) { std::string command = fmt::format( "git show --raw --pretty='format:%ct%x00%aN%x00%P%x00%s%x00' --patch --unified=0 {}", @@ -1135,7 +1135,7 @@ auto gitShow(const std::string & hash) /** Obtain the list of commits and process them. */ -void processLog(const Options & options) +static void processLog(const Options & options) { ResultWriter result; diff --git a/programs/keeper/keeper_main.cpp b/programs/keeper/keeper_main.cpp index a240f9699f2..d20552e6b81 100644 --- a/programs/keeper/keeper_main.cpp +++ b/programs/keeper/keeper_main.cpp @@ -63,7 +63,7 @@ int printHelp(int, char **) } -bool isClickhouseApp(std::string_view app_suffix, std::vector & argv) +static bool isClickhouseApp(std::string_view app_suffix, std::vector & argv) { /// Use app if the first arg 'app' is passed (the arg should be quietly removed) if (argv.size() >= 2) @@ -132,7 +132,7 @@ __attribute__((constructor(0))) void init_je_malloc_message() { malloc_message = /// /// extern bool inside_main; /// class C { C() { assert(inside_main); } }; -bool inside_main = false; +static bool inside_main = false; int main(int argc_, char ** argv_) { diff --git a/programs/obfuscator/Obfuscator.cpp b/programs/obfuscator/Obfuscator.cpp index 8aa91ebaae0..1d68f6e3ae2 100644 --- a/programs/obfuscator/Obfuscator.cpp +++ b/programs/obfuscator/Obfuscator.cpp @@ -136,7 +136,7 @@ using ModelPtr = std::unique_ptr; template -UInt64 hash(Ts... xs) +static UInt64 hash(Ts... xs) { SipHash hash; (hash.update(xs), ...); @@ -271,7 +271,7 @@ public: /// Pseudorandom permutation of mantissa. template -Float transformFloatMantissa(Float x, UInt64 seed) +static Float transformFloatMantissa(Float x, UInt64 seed) { using UInt = std::conditional_t, UInt32, UInt64>; constexpr size_t mantissa_num_bits = std::is_same_v ? 23 : 52; diff --git a/programs/static-files-disk-uploader/static-files-disk-uploader.cpp b/programs/static-files-disk-uploader/static-files-disk-uploader.cpp index 590e0364040..356cb2ececb 100644 --- a/programs/static-files-disk-uploader/static-files-disk-uploader.cpp +++ b/programs/static-files-disk-uploader/static-files-disk-uploader.cpp @@ -32,7 +32,7 @@ namespace ErrorCodes * If test-mode option is added, files will be put by given url via PUT request. 
*/ -void processFile(const fs::path & file_path, const fs::path & dst_path, bool test_mode, bool link, WriteBuffer & metadata_buf) +static void processFile(const fs::path & file_path, const fs::path & dst_path, bool test_mode, bool link, WriteBuffer & metadata_buf) { String remote_path; RE2::FullMatch(file_path.string(), EXTRACT_PATH_PATTERN, &remote_path); @@ -77,7 +77,7 @@ void processFile(const fs::path & file_path, const fs::path & dst_path, bool tes } -void processTableFiles(const fs::path & data_path, fs::path dst_path, bool test_mode, bool link) +static void processTableFiles(const fs::path & data_path, fs::path dst_path, bool test_mode, bool link) { std::cerr << "Data path: " << data_path << ", destination path: " << dst_path << std::endl; diff --git a/programs/su/su.cpp b/programs/su/su.cpp index 40242d0687f..322d71eefa6 100644 --- a/programs/su/su.cpp +++ b/programs/su/su.cpp @@ -40,7 +40,7 @@ namespace ErrorCodes extern const int SYSTEM_ERROR; } -void setUserAndGroup(std::string arg_uid, std::string arg_gid) +static void setUserAndGroup(std::string arg_uid, std::string arg_gid) { static constexpr size_t buf_size = 16384; /// Linux man page says it is enough. Nevertheless, we will check if it's not enough and throw. std::unique_ptr buf(new char[buf_size]); diff --git a/src/Access/AccessEntityIO.cpp b/src/Access/AccessEntityIO.cpp index cc1b7eee807..5b77574d183 100644 --- a/src/Access/AccessEntityIO.cpp +++ b/src/Access/AccessEntityIO.cpp @@ -53,7 +53,7 @@ String serializeAccessEntity(const IAccessEntity & entity) return buf.str(); } -AccessEntityPtr deserializeAccessEntityImpl(const String & definition) +static AccessEntityPtr deserializeAccessEntityImpl(const String & definition) { ASTs queries; ParserAttachAccessEntity parser; diff --git a/src/Access/AuthenticationData.cpp b/src/Access/AuthenticationData.cpp index 37a4e356af8..f20d46eea18 100644 --- a/src/Access/AuthenticationData.cpp +++ b/src/Access/AuthenticationData.cpp @@ -80,7 +80,7 @@ AuthenticationData::Digest AuthenticationData::Util::encodeBcrypt(std::string_vi if (ret != 0) throw Exception(ErrorCodes::LOGICAL_ERROR, "BCrypt library failed: bcrypt_gensalt returned {}", ret); - ret = bcrypt_hashpw(text.data(), salt, reinterpret_cast(hash.data())); + ret = bcrypt_hashpw(text.data(), salt, reinterpret_cast(hash.data())); /// NOLINT(bugprone-suspicious-stringview-data-usage) if (ret != 0) throw Exception(ErrorCodes::LOGICAL_ERROR, "BCrypt library failed: bcrypt_hashpw returned {}", ret); @@ -95,7 +95,7 @@ AuthenticationData::Digest AuthenticationData::Util::encodeBcrypt(std::string_vi bool AuthenticationData::Util::checkPasswordBcrypt(std::string_view password [[maybe_unused]], const Digest & password_bcrypt [[maybe_unused]]) { #if USE_BCRYPT - int ret = bcrypt_checkpw(password.data(), reinterpret_cast(password_bcrypt.data())); + int ret = bcrypt_checkpw(password.data(), reinterpret_cast(password_bcrypt.data())); /// NOLINT(bugprone-suspicious-stringview-data-usage) /// Before 24.6 we didn't validate hashes on creation, so it could be that the stored hash is invalid /// and it could not be decoded by the library if (ret == -1) diff --git a/src/Access/ExternalAuthenticators.cpp b/src/Access/ExternalAuthenticators.cpp index 77812ac5eb5..91d0ff7ff0f 100644 --- a/src/Access/ExternalAuthenticators.cpp +++ b/src/Access/ExternalAuthenticators.cpp @@ -371,7 +371,7 @@ void ExternalAuthenticators::setConfiguration(const Poco::Util::AbstractConfigur } } -UInt128 computeParamsHash(const LDAPClient::Params & params, const 
LDAPClient::RoleSearchParamsList * role_search_params) +static UInt128 computeParamsHash(const LDAPClient::Params & params, const LDAPClient::RoleSearchParamsList * role_search_params) { SipHash hash; params.updateHash(hash); diff --git a/src/Access/SettingsProfileElement.cpp b/src/Access/SettingsProfileElement.cpp index 1de61771f93..4d7fc56a806 100644 --- a/src/Access/SettingsProfileElement.cpp +++ b/src/Access/SettingsProfileElement.cpp @@ -36,7 +36,7 @@ void SettingsProfileElement::init(const ASTSettingsProfileElement & ast, const A if (id_mode) return parse(name_); assert(access_control); - return access_control->getID(name_); + return access_control->getID(name_); /// NOLINT(clang-analyzer-core.CallAndMessage) }; if (!ast.parent_profile.empty()) diff --git a/src/AggregateFunctions/AggregateFunctionDistinctDynamicTypes.cpp b/src/AggregateFunctions/AggregateFunctionDistinctDynamicTypes.cpp index 57f7aecd316..75b817fcfa3 100644 --- a/src/AggregateFunctions/AggregateFunctionDistinctDynamicTypes.cpp +++ b/src/AggregateFunctions/AggregateFunctionDistinctDynamicTypes.cpp @@ -139,7 +139,7 @@ public: } }; -AggregateFunctionPtr createAggregateFunctionDistinctDynamicTypes( +static AggregateFunctionPtr createAggregateFunctionDistinctDynamicTypes( const std::string & name, const DataTypes & argument_types, const Array & parameters, const Settings *) { assertNoParameters(name, parameters); diff --git a/src/AggregateFunctions/AggregateFunctionDistinctJSONPaths.cpp b/src/AggregateFunctions/AggregateFunctionDistinctJSONPaths.cpp index 4e60e6fe60b..b4a6379adfd 100644 --- a/src/AggregateFunctions/AggregateFunctionDistinctJSONPaths.cpp +++ b/src/AggregateFunctions/AggregateFunctionDistinctJSONPaths.cpp @@ -327,7 +327,7 @@ private: }; template -AggregateFunctionPtr createAggregateFunctionDistinctJSONPathsAndTypes( +static AggregateFunctionPtr createAggregateFunctionDistinctJSONPathsAndTypes( const std::string & name, const DataTypes & argument_types, const Array & parameters, const Settings *) { assertNoParameters(name, parameters); diff --git a/src/AggregateFunctions/AggregateFunctionFlameGraph.cpp b/src/AggregateFunctions/AggregateFunctionFlameGraph.cpp index a0b5b798a45..069b832602a 100644 --- a/src/AggregateFunctions/AggregateFunctionFlameGraph.cpp +++ b/src/AggregateFunctions/AggregateFunctionFlameGraph.cpp @@ -217,7 +217,7 @@ static void fillColumn(DB::PaddedPODArray & chars, DB::PaddedPODArray & chars, DB::PaddedPODArray & offsets) @@ -630,7 +630,7 @@ static void check(const std::string & name, const DataTypes & argument_types, co name, argument_types[2]->getName()); } -AggregateFunctionPtr createAggregateFunctionFlameGraph(const std::string & name, const DataTypes & argument_types, const Array & params, const Settings * settings) +static AggregateFunctionPtr createAggregateFunctionFlameGraph(const std::string & name, const DataTypes & argument_types, const Array & params, const Settings * settings) { if (!(*settings)[Setting::allow_introspection_functions]) throw Exception(ErrorCodes::FUNCTION_NOT_ALLOWED, diff --git a/src/AggregateFunctions/AggregateFunctionGroupArray.cpp b/src/AggregateFunctions/AggregateFunctionGroupArray.cpp index 7dc5a4b86b3..7e711fcfc4a 100644 --- a/src/AggregateFunctions/AggregateFunctionGroupArray.cpp +++ b/src/AggregateFunctions/AggregateFunctionGroupArray.cpp @@ -95,7 +95,7 @@ struct GroupArraySamplerData /// With a large number of values, we will generate random numbers several times slower. 
if (lim <= static_cast(pcg32_fast::max())) - return rng() % lim; + return rng() % lim; /// NOLINT(clang-analyzer-core.DivideZero) return (static_cast(rng()) * (static_cast(pcg32::max()) + 1ULL) + static_cast(rng())) % lim; } @@ -494,7 +494,7 @@ class GroupArrayGeneralImpl final { static constexpr bool limit_num_elems = Trait::has_limit; using Data = GroupArrayGeneralData; - static Data & data(AggregateDataPtr __restrict place) { return *reinterpret_cast(place); } + static Data & data(AggregateDataPtr __restrict place) { return *reinterpret_cast(place); } /// NOLINT(readability-non-const-parameter) static const Data & data(ConstAggregateDataPtr __restrict place) { return *reinterpret_cast(place); } DataTypePtr & data_type; diff --git a/src/AggregateFunctions/AggregateFunctionSequenceNextNode.cpp b/src/AggregateFunctions/AggregateFunctionSequenceNextNode.cpp index 94dff52ade3..c7e2cf27fa8 100644 --- a/src/AggregateFunctions/AggregateFunctionSequenceNextNode.cpp +++ b/src/AggregateFunctions/AggregateFunctionSequenceNextNode.cpp @@ -179,7 +179,7 @@ class SequenceNextNodeImpl final using Self = SequenceNextNodeImpl; using Data = SequenceNextNodeGeneralData; - static Data & data(AggregateDataPtr __restrict place) { return *reinterpret_cast(place); } + static Data & data(AggregateDataPtr __restrict place) { return *reinterpret_cast(place); } /// NOLINT(readability-non-const-parameter) static const Data & data(ConstAggregateDataPtr __restrict place) { return *reinterpret_cast(place); } static constexpr size_t base_cond_column_idx = 2; diff --git a/src/AggregateFunctions/IAggregateFunction.h b/src/AggregateFunctions/IAggregateFunction.h index 4f1f5388032..bd1a8f876a4 100644 --- a/src/AggregateFunctions/IAggregateFunction.h +++ b/src/AggregateFunctions/IAggregateFunction.h @@ -694,7 +694,7 @@ class IAggregateFunctionDataHelper : public IAggregateFunctionHelper protected: using Data = T; - static Data & data(AggregateDataPtr __restrict place) { return *reinterpret_cast(place); } + static Data & data(AggregateDataPtr __restrict place) { return *reinterpret_cast(place); } /// NOLINT(readability-non-const-parameter) static const Data & data(ConstAggregateDataPtr __restrict place) { return *reinterpret_cast(place); } public: diff --git a/src/AggregateFunctions/ReservoirSampler.h b/src/AggregateFunctions/ReservoirSampler.h index 870cb429fb7..6de84c15131 100644 --- a/src/AggregateFunctions/ReservoirSampler.h +++ b/src/AggregateFunctions/ReservoirSampler.h @@ -259,7 +259,7 @@ private: /// With a large number of values, we will generate random numbers several times slower. 
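// For context: the genRandom helpers in GroupArraySamplerData and ReservoirSampler (above and below)
// draw a value below a caller-supplied limit, using a single 32-bit draw when the limit fits and
// combining two draws otherwise; the NOLINT(clang-analyzer-core.DivideZero) suppressions record that
// callers never pass a zero limit. A rough illustrative sketch with a standard-library generator
// (hypothetical names, not the code from this patch):
#include <cstdint>
#include <random>
static uint64_t boundedRandom(std::mt19937 & rng32, uint64_t limit)
{
    /// Callers must guarantee limit > 0, otherwise the modulo would divide by zero.
    if (limit <= rng32.max())
        return rng32() % limit;                                    /// one 32-bit draw is enough
    uint64_t high = rng32();
    uint64_t low = rng32();
    return (high * (uint64_t(rng32.max()) + 1) + low) % limit;     /// combine two draws into a 64-bit value
}
int main()
{
    std::mt19937 rng(42);
    return boundedRandom(rng, 10) < 10 ? 0 : 1;
}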
if (limit <= static_cast(pcg32_fast::max())) - return rng() % limit; + return rng() % limit; /// NOLINT(clang-analyzer-core.DivideZero) return (static_cast(rng()) * (static_cast(pcg32_fast::max()) + 1ULL) + static_cast(rng())) % limit; } diff --git a/src/Client/LineReader.cpp b/src/Client/LineReader.cpp index e077343ada3..69f004baf01 100644 --- a/src/Client/LineReader.cpp +++ b/src/Client/LineReader.cpp @@ -100,13 +100,13 @@ replxx::Replxx::completions_t LineReader::Suggest::getCompletions(const String & range = std::equal_range( to_search.begin(), to_search.end(), last_word, [prefix_length](std::string_view s, std::string_view prefix_searched) { - return strncasecmp(s.data(), prefix_searched.data(), prefix_length) < 0; + return strncasecmp(s.data(), prefix_searched.data(), prefix_length) < 0; /// NOLINT(bugprone-suspicious-stringview-data-usage) }); else range = std::equal_range( to_search.begin(), to_search.end(), last_word, [prefix_length](std::string_view s, std::string_view prefix_searched) { - return strncmp(s.data(), prefix_searched.data(), prefix_length) < 0; + return strncmp(s.data(), prefix_searched.data(), prefix_length) < 0; /// NOLINT(bugprone-suspicious-stringview-data-usage) }); return replxx::Replxx::completions_t(range.first, range.second); diff --git a/src/Client/TerminalKeystrokeInterceptor.cpp b/src/Client/TerminalKeystrokeInterceptor.cpp index f59bf79fcaa..0b7fd0d42c7 100644 --- a/src/Client/TerminalKeystrokeInterceptor.cpp +++ b/src/Client/TerminalKeystrokeInterceptor.cpp @@ -121,7 +121,7 @@ void TerminalKeystrokeInterceptor::runImpl(const DB::TerminalKeystrokeIntercepto if (available <= 0) return; - if (read(fd, &ch, 1) > 0) + if (read(fd, &ch, 1) > 0) /// NOLINT(clang-analyzer-unix.BlockInCriticalSection) { auto it = map.find(ch); if (it != map.end()) diff --git a/src/Columns/ColumnObject.cpp b/src/Columns/ColumnObject.cpp index e56b5d34ae0..064145c5d4f 100644 --- a/src/Columns/ColumnObject.cpp +++ b/src/Columns/ColumnObject.cpp @@ -440,7 +440,7 @@ bool ColumnObject::tryInsert(const Field & x) column->popBack(column->size() - prev_size); } - if (shared_data_paths->size() != prev_paths_size) + if (shared_data_paths->size() != prev_paths_size) /// NOLINT(clang-analyzer-core.NullDereference) shared_data_paths->popBack(shared_data_paths->size() - prev_paths_size); if (shared_data_values->size() != prev_values_size) shared_data_values->popBack(shared_data_values->size() - prev_values_size); diff --git a/src/Columns/IColumn.h b/src/Columns/IColumn.h index c77b089812e..70e20a1ad7f 100644 --- a/src/Columns/IColumn.h +++ b/src/Columns/IColumn.h @@ -815,6 +815,15 @@ bool isColumnNullableOrLowCardinalityNullable(const IColumn & column); template class IColumnHelper : public Parent { +private: + using Self = IColumnHelper; + + friend Derived; + friend class COWHelper; + + IColumnHelper() = default; + IColumnHelper(const IColumnHelper &) = default; + /// Devirtualize insertFrom. 
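// For context: the private default/copy constructors plus `friend Derived` added to IColumnHelper just
// above (and to COW/COWHelper below) follow the pattern behind clang-tidy's
// bugprone-crtp-constructor-accessibility check: only the intended Derived type may construct the CRTP
// base, so a mismatched instantiation such as `class B : CrtpBase<SomeOtherClass>` cannot be created by
// accident. A minimal self-contained sketch of the idea (CrtpBase/Widget are hypothetical names):
#include <iostream>
template <typename Derived>
class CrtpBase
{
    friend Derived;                    /// only Derived may call the constructors below
private:
    CrtpBase() = default;
    CrtpBase(const CrtpBase &) = default;
public:
    void greet() const { static_cast<const Derived &>(*this).greetImpl(); }
};
class Widget : public CrtpBase<Widget>
{
public:
    void greetImpl() const { std::cout << "widget\n"; }
};
/// class Oops : public CrtpBase<Widget> {};  -- an Oops object could not be constructed,
/// because CrtpBase<Widget>'s constructors are accessible to Widget only.
int main()
{
    Widget w;
    w.greet();
    return 0;
}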
MutableColumns scatter(IColumn::ColumnIndex num_columns, const IColumn::Selector & selector) const override; diff --git a/src/Common/AsyncLoader.cpp b/src/Common/AsyncLoader.cpp index 01a08053a03..6bd765a1419 100644 --- a/src/Common/AsyncLoader.cpp +++ b/src/Common/AsyncLoader.cpp @@ -724,14 +724,14 @@ void AsyncLoader::enqueue(Info & info, const LoadJobPtr & job, std::unique_lock< // (when high-priority job A function waits for a lower-priority job B, and B never starts due to its priority) // 4) Resolve "blocked pool" deadlocks -- spawn more workers // (when job A in pool P waits for another ready job B in P, but B never starts because there are no free workers in P) -thread_local LoadJob * current_load_job = nullptr; +static thread_local LoadJob * current_load_job = nullptr; size_t currentPoolOr(size_t pool) { return current_load_job ? current_load_job->executionPool() : pool; } -bool detectWaitDependentDeadlock(const LoadJobPtr & waited) +bool static detectWaitDependentDeadlock(const LoadJobPtr & waited) { if (waited.get() == current_load_job) return true; diff --git a/src/Common/COW.h b/src/Common/COW.h index ec9cfa6dba8..eba7323660e 100644 --- a/src/Common/COW.h +++ b/src/Common/COW.h @@ -75,10 +75,15 @@ template class COW : public boost::intrusive_ref_counter { + friend Derived; + private: Derived * derived() { return static_cast(this); } const Derived * derived() const { return static_cast(this); } + COW() = default; + COW(const COW&) = default; + protected: template class mutable_ptr : public boost::intrusive_ptr /// NOLINT @@ -271,10 +276,15 @@ public: template class COWHelper : public Base { + friend Derived; + private: Derived * derived() { return static_cast(this); } const Derived * derived() const { return static_cast(this); } + COWHelper() = default; + COWHelper(const COWHelper &) = default; + public: using Ptr = typename Base::template immutable_ptr; using MutablePtr = typename Base::template mutable_ptr; diff --git a/src/Common/ColumnsHashingImpl.h b/src/Common/ColumnsHashingImpl.h index 0e013decf1f..92964b92780 100644 --- a/src/Common/ColumnsHashingImpl.h +++ b/src/Common/ColumnsHashingImpl.h @@ -161,7 +161,7 @@ public: FindResultImpl(Mapped * value_, bool found_, size_t off) : FindResultImplBase(found_), FindResultImplOffsetBase(off), value(value_) {} - Mapped & getMapped() const { return *value; } + Mapped & getMapped() const { return *value; } /// NOLINT(clang-analyzer-core.uninitialized.UndefReturn) }; template diff --git a/src/Common/DNSResolver.cpp b/src/Common/DNSResolver.cpp index 5a18ef893e0..75f3b66af95 100644 --- a/src/Common/DNSResolver.cpp +++ b/src/Common/DNSResolver.cpp @@ -377,7 +377,7 @@ String DNSResolver::getHostName() return *impl->host_name; } -static const String & cacheElemToString(const String & str) { return str; } +static String cacheElemToString(String str) { return str; } static String cacheElemToString(const Poco::Net::IPAddress & addr) { return addr.toString(); } template diff --git a/src/Common/DateLUTImpl.cpp b/src/Common/DateLUTImpl.cpp index 355d39780f2..8b7388a3664 100644 --- a/src/Common/DateLUTImpl.cpp +++ b/src/Common/DateLUTImpl.cpp @@ -22,7 +22,7 @@ namespace ErrorCodes } /// Embedded timezones. 
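// For context: the cacheElemToString change in DNSResolver.cpp above (and the
// NOLINT(bugprone-return-const-ref-from-parameter) markers elsewhere in this patch) concern functions
// that return a reference to one of their own parameters: if the caller passes a temporary, that
// reference dangles once the statement ends. Returning by value avoids the problem. An illustrative
// sketch (the toKey* functions are hypothetical names, not from this patch):
#include <string>
/// Risky: toKeyByRef(std::string("tmp")) yields a reference to a temporary that is destroyed
/// at the end of the statement.
static const std::string & toKeyByRef(const std::string & s) { return s; }
/// Safe: take and return by value; the result is a fresh object owned by the caller.
static std::string toKeyByValue(std::string s) { return s; }
int main()
{
    std::string ok = toKeyByValue(std::string("tmp"));
    /// const std::string & bad = toKeyByRef(std::string("tmp"));  -- would dangle immediately
    return ok == "tmp" ? 0 : 1;
}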
-std::string_view getTimeZone(const char * name); +std::string_view getTimeZone(const char * name); /// NOLINT(misc-use-internal-linkage) namespace diff --git a/src/Common/Dwarf.cpp b/src/Common/Dwarf.cpp index 1b31903cde7..004516a9626 100644 --- a/src/Common/Dwarf.cpp +++ b/src/Common/Dwarf.cpp @@ -256,7 +256,7 @@ uint64_t readOffset(std::string_view & sp, bool is64_bit) std::string_view readBytes(std::string_view & sp, uint64_t len) { SAFE_CHECK(len <= sp.size(), "invalid string length: {} vs. {}", len, sp.size()); - std::string_view ret(sp.data(), len); + std::string_view ret(sp.data(), len); /// NOLINT(bugprone-suspicious-stringview-data-usage) sp.remove_prefix(len); return ret; } @@ -266,7 +266,7 @@ std::string_view readNullTerminated(std::string_view & sp) { const char * p = static_cast(memchr(sp.data(), 0, sp.size())); SAFE_CHECK(p, "invalid null-terminated string"); - std::string_view ret(sp.data(), p - sp.data()); + std::string_view ret(sp.data(), p - sp.data()); /// NOLINT(bugprone-suspicious-stringview-data-usage) sp = std::string_view(p + 1, sp.size()); return ret; } @@ -442,7 +442,7 @@ bool Dwarf::Section::next(std::string_view & chunk) is64_bit = (initial_length == uint32_t(-1)); auto length = is64_bit ? read(chunk) : initial_length; SAFE_CHECK(length <= chunk.size(), "invalid DWARF section"); - chunk = std::string_view(chunk.data(), length); + chunk = std::string_view(chunk.data(), length); /// NOLINT(bugprone-suspicious-stringview-data-usage) data = std::string_view(chunk.end(), data.end() - chunk.end()); return true; } @@ -937,7 +937,7 @@ bool Dwarf::findDebugInfoOffset(uintptr_t address, std::string_view aranges, uin // Padded to a multiple of 2 addresses. // Strangely enough, this is the only place in the DWARF spec that requires // padding. 
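// For context: the NOLINT(bugprone-suspicious-stringview-data-usage) markers added throughout
// Dwarf.cpp above acknowledge that std::string_view::data() is not null-terminated; the surrounding
// code always pairs the pointer with an explicit length, so the warning is a false positive there.
// A small sketch of the safe and unsafe patterns (illustrative only, not this patch's code):
#include <cstring>
#include <string>
#include <string_view>
int main()
{
    std::string_view sv = std::string_view("abcdef").substr(0, 3);  /// a 3-character view with no terminator of its own
    std::string copy(sv.data(), sv.size());                         /// OK: pointer plus explicit length
    /// std::strlen(sv.data());  -- unsafe in general: reads until a '\0' the view does not guarantee
    return copy == "abc" ? 0 : 1;
}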
- skipPadding(chunk, aranges.data(), 2 * sizeof(uintptr_t)); + skipPadding(chunk, aranges.data(), 2 * sizeof(uintptr_t)); /// NOLINT(bugprone-suspicious-stringview-data-usage) for (;;) { auto start = read(chunk); @@ -1681,7 +1681,7 @@ struct LineNumberAttribute std::variant attr_value; }; -LineNumberAttribute readLineNumberAttribute( +LineNumberAttribute static readLineNumberAttribute( bool is64_bit, std::string_view & format, std::string_view & entries, std::string_view debugStr, std::string_view debugLineStr) { uint64_t content_type_code = readULEB(format); @@ -1817,7 +1817,7 @@ void Dwarf::LineNumberVM::init() } uint64_t header_length = readOffset(data_, is64Bit_); SAFE_CHECK(header_length <= data_.size(), "invalid line number VM header length"); - std::string_view header(data_.data(), header_length); + std::string_view header(data_.data(), header_length); /// NOLINT(bugprone-suspicious-stringview-data-usage) data_ = std::string_view(header.end(), data_.end() - header.end()); minLength_ = read(header); @@ -1846,7 +1846,7 @@ void Dwarf::LineNumberVM::init() { ++v4_.includeDirectoryCount; } - v4_.includeDirectories = {tmp, header.data()}; + v4_.includeDirectories = {tmp, header.data()}; /// NOLINT(bugprone-suspicious-stringview-data-usage) tmp = header.data(); FileName fn; @@ -1855,7 +1855,7 @@ void Dwarf::LineNumberVM::init() { ++v4_.fileNameCount; } - v4_.fileNames = {tmp, header.data()}; + v4_.fileNames = {tmp, header.data()}; /// NOLINT(bugprone-suspicious-stringview-data-usage) } else if (version_ == 5) { @@ -1868,7 +1868,7 @@ void Dwarf::LineNumberVM::init() readULEB(header); // A content type code readULEB(header); // A form code using the attribute form codes } - v5_.directoryEntryFormat = {tmp, header.data()}; + v5_.directoryEntryFormat = {tmp, header.data()}; /// NOLINT(bugprone-suspicious-stringview-data-usage) v5_.directoriesCount = readULEB(header); tmp = header.data(); for (uint64_t i = 0; i < v5_.directoriesCount; i++) @@ -1879,7 +1879,7 @@ void Dwarf::LineNumberVM::init() readLineNumberAttribute(is64Bit_, format, header, debugStr_, debugLineStr_); } } - v5_.directories = {tmp, header.data()}; + v5_.directories = {tmp, header.data()}; /// NOLINT(bugprone-suspicious-stringview-data-usage) v5_.fileNameEntryFormatCount = read(header); tmp = header.data(); @@ -1890,7 +1890,7 @@ void Dwarf::LineNumberVM::init() readULEB(header); // A content type code readULEB(header); // A form code using the attribute form codes } - v5_.fileNameEntryFormat = {tmp, header.data()}; + v5_.fileNameEntryFormat = {tmp, header.data()}; /// NOLINT(bugprone-suspicious-stringview-data-usage) v5_.fileNamesCount = readULEB(header); tmp = header.data(); for (uint64_t i = 0; i < v5_.fileNamesCount; i++) @@ -1901,7 +1901,7 @@ void Dwarf::LineNumberVM::init() readLineNumberAttribute(is64Bit_, format, header, debugStr_, debugLineStr_); } } - v5_.fileNames = {tmp, header.data()}; + v5_.fileNames = {tmp, header.data()}; /// NOLINT(bugprone-suspicious-stringview-data-usage) } } diff --git a/src/Common/ErrorCodes.cpp b/src/Common/ErrorCodes.cpp index 376ccf6f297..122877c7772 100644 --- a/src/Common/ErrorCodes.cpp +++ b/src/Common/ErrorCodes.cpp @@ -650,7 +650,7 @@ namespace ErrorCodes APPLY_FOR_ERROR_CODES(M) #undef M } - } error_codes_names; + } static error_codes_names; std::string_view getName(ErrorCode error_code) { diff --git a/src/Common/Exception.cpp b/src/Common/Exception.cpp index 644c9a19738..a08dabcf703 100644 --- a/src/Common/Exception.cpp +++ b/src/Common/Exception.cpp @@ -61,7 +61,7 @@ std::function 
-inline Key & ALWAYS_INLINE keyHolderGetKey(Key && key) { return key; } +inline Key & ALWAYS_INLINE keyHolderGetKey(Key && key) { return key; } /// NOLINT(bugprone-return-const-ref-from-parameter) /** * Make the key persistent. keyHolderGetKey() must return the persistent key diff --git a/src/Common/IFactoryWithAliases.h b/src/Common/IFactoryWithAliases.h index d3efbb6b761..43cccce1472 100644 --- a/src/Common/IFactoryWithAliases.h +++ b/src/Common/IFactoryWithAliases.h @@ -120,7 +120,7 @@ public: auto it = case_insensitive_name_mapping.find(Poco::toLower(name)); if (it != case_insensitive_name_mapping.end()) return it->second; - return name; + return name; /// NOLINT(bugprone-return-const-ref-from-parameter) } ~IFactoryWithAliases() override = default; diff --git a/src/Common/JSONParsers/SimdJSONParser.h b/src/Common/JSONParsers/SimdJSONParser.h index db679b14f52..09190334fcf 100644 --- a/src/Common/JSONParsers/SimdJSONParser.h +++ b/src/Common/JSONParsers/SimdJSONParser.h @@ -124,7 +124,7 @@ public: // At least for long strings, the following should be fast. We could // do better by integrating the checks and the insertion. - buffer.insert(unescaped.data(), unescaped.data() + i); + buffer.insert(unescaped.data(), unescaped.data() + i); /// NOLINT(bugprone-suspicious-stringview-data-usage) // We caught a control character if we enter this loop (slow). // Note that we are do not restart from the beginning, but rather we continue // from the point where we encountered something that requires escaping. diff --git a/src/Common/OpenTelemetryTraceContext.cpp b/src/Common/OpenTelemetryTraceContext.cpp index 1a2876823d4..26fd9d37a88 100644 --- a/src/Common/OpenTelemetryTraceContext.cpp +++ b/src/Common/OpenTelemetryTraceContext.cpp @@ -24,7 +24,7 @@ namespace OpenTelemetry { /// This code can be executed inside fibers, we should use fiber local tracing context. -thread_local FiberLocal current_trace_context; +thread_local static FiberLocal current_trace_context; bool Span::addAttribute(std::string_view name, UInt64 value) noexcept { diff --git a/src/Common/PageCache.cpp b/src/Common/PageCache.cpp index a8937c99c5a..75047351e43 100644 --- a/src/Common/PageCache.cpp +++ b/src/Common/PageCache.cpp @@ -525,7 +525,7 @@ PageChunk * PageCache::getFreeChunk() PageChunk * chunk = &lru.front(); lru.erase(lru.iterator_to(*chunk)); - size_t prev_pin_count = chunk->pin_count.fetch_add(1); + size_t prev_pin_count = chunk->pin_count.fetch_add(1); /// NOLINT(clang-analyzer-deadcode.DeadStores) chassert(prev_pin_count == 0); evictChunk(chunk); @@ -537,7 +537,7 @@ void PageCache::evictChunk(PageChunk * chunk) { if (chunk->key.has_value()) { - size_t erased = chunk_by_key.erase(chunk->key.value()); + size_t erased = chunk_by_key.erase(chunk->key.value()); /// NOLINT(clang-analyzer-deadcode.DeadStores) chassert(erased); chunk->key.reset(); } diff --git a/src/Common/ProfileEvents.cpp b/src/Common/ProfileEvents.cpp index 1534aa9bd4e..ec540dcf632 100644 --- a/src/Common/ProfileEvents.cpp +++ b/src/Common/ProfileEvents.cpp @@ -922,7 +922,7 @@ namespace ProfileEvents constexpr Event END = Event(__COUNTER__); /// Global variable, initialized by zeros. 
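// For context: the dominant change in this patch is marking translation-unit-local free functions and
// globals as `static` (for example the thread_local trace context above and the ProfileEvents counters
// array below), which is what clang-tidy's misc-use-internal-linkage asks for: symbols used only inside
// one .cpp file should not be exported with external linkage. A minimal sketch (hypothetical names):
#include <cstdio>
/// Internal linkage: private to this translation unit, cannot collide with a same-named
/// helper defined in another .cpp file.
static int squareOf(int x) { return x * x; }
static thread_local int calls = 0;   /// the same applies to thread_local globals
int main()
{
    ++calls;
    std::printf("%d %d\n", squareOf(6), calls);
    return 0;
}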
-Counter global_counters_array[END] {}; +static Counter global_counters_array[END] {}; /// Initialize global counters statically Counters global_counters(global_counters_array); diff --git a/src/Common/QueryFuzzer.cpp b/src/Common/QueryFuzzer.cpp index 3439836a278..ce5baad816f 100644 --- a/src/Common/QueryFuzzer.cpp +++ b/src/Common/QueryFuzzer.cpp @@ -1287,9 +1287,9 @@ void QueryFuzzer::addTableLike(ASTPtr ast) if (table_like_map.size() > AST_FUZZER_PART_TYPE_CAP) { const auto iter = std::next(table_like.begin(), fuzz_rand() % table_like.size()); - const auto ast_del = *iter; - table_like.erase(iter); + const auto & ast_del = *iter; table_like_map.erase(ast_del.first); + table_like.erase(iter); } const auto name = ast->formatForErrorMessage(); @@ -1308,9 +1308,9 @@ void QueryFuzzer::addColumnLike(ASTPtr ast) if (column_like_map.size() > AST_FUZZER_PART_TYPE_CAP) { const auto iter = std::next(column_like.begin(), fuzz_rand() % column_like.size()); - const auto ast_del = *iter; - column_like.erase(iter); + const auto & ast_del = *iter; column_like_map.erase(ast_del.first); + column_like.erase(iter); } const auto name = ast->formatForErrorMessage(); diff --git a/src/Common/QueryProfiler.h b/src/Common/QueryProfiler.h index e3ab0b2e094..b563af1a2cf 100644 --- a/src/Common/QueryProfiler.h +++ b/src/Common/QueryProfiler.h @@ -53,13 +53,15 @@ private: template class QueryProfilerBase { + friend ProfilerImpl; + public: - QueryProfilerBase(UInt64 thread_id, int clock_type, UInt64 period, int pause_signal_); ~QueryProfilerBase(); void setPeriod(UInt64 period_); private: + QueryProfilerBase(UInt64 thread_id, int clock_type, UInt64 period, int pause_signal_); void cleanup(); LoggerPtr log; diff --git a/src/Common/SharedMutexHelper.h b/src/Common/SharedMutexHelper.h index 8dddaab6c78..034c4b70c91 100644 --- a/src/Common/SharedMutexHelper.h +++ b/src/Common/SharedMutexHelper.h @@ -42,6 +42,11 @@ namespace DB template class TSA_CAPABILITY("SharedMutexHelper") SharedMutexHelper { + friend Derived; +private: + SharedMutexHelper() = default; + SharedMutexHelper(const SharedMutexHelper&) = default; + public: // Exclusive ownership void lock() TSA_ACQUIRE() /// NOLINT diff --git a/src/Common/ShellCommand.cpp b/src/Common/ShellCommand.cpp index 0d41669816c..c8a92dae6c4 100644 --- a/src/Common/ShellCommand.cpp +++ b/src/Common/ShellCommand.cpp @@ -2,7 +2,6 @@ #include #include #include -#include #include #include @@ -154,6 +153,9 @@ std::unique_ptr ShellCommand::executeImpl( std::vector> read_pipe_fds; std::vector> write_pipe_fds; + read_pipe_fds.reserve(config.read_fds.size()); + write_pipe_fds.reserve(config.write_fds.size()); + for (size_t i = 0; i < config.read_fds.size(); ++i) read_pipe_fds.emplace_back(std::make_unique()); diff --git a/src/Common/StackTrace.cpp b/src/Common/StackTrace.cpp index 3dce34803b2..f78bacf3e1d 100644 --- a/src/Common/StackTrace.cpp +++ b/src/Common/StackTrace.cpp @@ -60,7 +60,7 @@ void StackTrace::setShowAddresses(bool show) show_addresses.store(show, std::memory_order_relaxed); } -std::string SigsegvErrorString(const siginfo_t & info, [[maybe_unused]] const ucontext_t & context) +static std::string SigsegvErrorString(const siginfo_t & info, [[maybe_unused]] const ucontext_t & context) { using namespace std::string_literals; std::string address @@ -99,7 +99,7 @@ std::string SigsegvErrorString(const siginfo_t & info, [[maybe_unused]] const uc return fmt::format("Address: {}. Access: {}. 
{}.", std::move(address), access, message); } -constexpr std::string_view SigbusErrorString(int si_code) +static constexpr std::string_view SigbusErrorString(int si_code) { switch (si_code) { @@ -124,7 +124,7 @@ constexpr std::string_view SigbusErrorString(int si_code) } } -constexpr std::string_view SigfpeErrorString(int si_code) +static constexpr std::string_view SigfpeErrorString(int si_code) { switch (si_code) { @@ -149,7 +149,7 @@ constexpr std::string_view SigfpeErrorString(int si_code) } } -constexpr std::string_view SigillErrorString(int si_code) +static constexpr std::string_view SigillErrorString(int si_code) { switch (si_code) { @@ -389,7 +389,7 @@ constexpr std::pair replacements[] // Demangle @c symbol_name if it's not from __functional header (as such functions don't provide any useful // information but pollute stack traces). // Replace parts from @c replacements with shorter aliases -String demangleAndCollapseNames(std::optional file, const char * const symbol_name) +static String demangleAndCollapseNames(std::optional file, const char * const symbol_name) { if (!symbol_name) return "?"; @@ -436,7 +436,7 @@ struct StackTraceTriple template concept MaybeRef = std::is_same_v || std::is_same_v; -constexpr bool operator<(const MaybeRef auto & left, const MaybeRef auto & right) +static constexpr bool operator<(const MaybeRef auto & left, const MaybeRef auto & right) { return std::tuple{left.pointers, left.size, left.offset} < std::tuple{right.pointers, right.size, right.offset}; } @@ -542,7 +542,7 @@ static StackTraceCache cache; static DB::SharedMutex stacktrace_cache_mutex; -String toStringCached(const StackTrace::FramePointers & pointers, size_t offset, size_t size) +static String toStringCached(const StackTrace::FramePointers & pointers, size_t offset, size_t size) { const StackTraceRefTriple key{pointers, offset, size}; diff --git a/src/Common/StringUtils.cpp b/src/Common/StringUtils.cpp index 18577e64c01..3699ff408e8 100644 --- a/src/Common/StringUtils.cpp +++ b/src/Common/StringUtils.cpp @@ -23,7 +23,7 @@ bool endsWith(const std::string & s, const char * suffix, size_t suffix_size) } DECLARE_DEFAULT_CODE( -bool isAllASCII(const UInt8 * data, size_t size) +static bool isAllASCII(const UInt8 * data, size_t size) { UInt8 mask = 0; for (size_t i = 0; i < size; ++i) @@ -34,7 +34,7 @@ bool isAllASCII(const UInt8 * data, size_t size) DECLARE_SSE42_SPECIFIC_CODE( /// Copy from https://github.com/lemire/fastvalidate-utf-8/blob/master/include/simdasciicheck.h -bool isAllASCII(const UInt8 * data, size_t size) +static bool isAllASCII(const UInt8 * data, size_t size) { __m128i masks = _mm_setzero_si128(); @@ -55,7 +55,7 @@ bool isAllASCII(const UInt8 * data, size_t size) }) DECLARE_AVX2_SPECIFIC_CODE( -bool isAllASCII(const UInt8 * data, size_t size) +static bool isAllASCII(const UInt8 * data, size_t size) { __m256i masks = _mm256_setzero_si256(); diff --git a/src/Common/TargetSpecific.cpp b/src/Common/TargetSpecific.cpp index 4400d9a60b3..4d276a18413 100644 --- a/src/Common/TargetSpecific.cpp +++ b/src/Common/TargetSpecific.cpp @@ -6,7 +6,7 @@ namespace DB { -UInt32 getSupportedArchs() +static UInt32 getSupportedArchs() { UInt32 result = 0; if (CPU::CPUFlagsCache::have_SSE42) diff --git a/src/Common/TimerDescriptor.cpp b/src/Common/TimerDescriptor.cpp index 1622642c507..c63679092c0 100644 --- a/src/Common/TimerDescriptor.cpp +++ b/src/Common/TimerDescriptor.cpp @@ -132,7 +132,7 @@ void TimerDescriptor::drain() const throw ErrnoException(ErrorCodes::CANNOT_READ_FROM_SOCKET, "Cannot 
drain timer_fd {}", timer_fd); } - chassert(res == sizeof(buf)); + chassert(res == sizeof(buf)); /// NOLINT(bugprone-sizeof-expression) } } diff --git a/src/Common/TypePromotion.h b/src/Common/TypePromotion.h index 62d2de90e04..937d2cc04b4 100644 --- a/src/Common/TypePromotion.h +++ b/src/Common/TypePromotion.h @@ -15,7 +15,12 @@ namespace DB template class TypePromotion { + friend Base; + private: + TypePromotion() = default; + // TypePromotion(const TypePromotion &) = default; + /// Need a helper-struct to fight the lack of the function-template partial specialization. template > struct CastHelper; diff --git a/src/Common/Visitor.h b/src/Common/Visitor.h index 95e60a1f6df..4e4da041e25 100644 --- a/src/Common/Visitor.h +++ b/src/Common/Visitor.h @@ -159,7 +159,7 @@ protected: }; template -class Visitable : public Base +class Visitable : public Base /// NOLINT(bugprone-crtp-constructor-accessibility) { public: void accept(Visitor & visitor) override { visitor.visit(*static_cast(this)); } diff --git a/src/Common/filesystemHelpers.cpp b/src/Common/filesystemHelpers.cpp index 2d053c615d9..b4f5119715e 100644 --- a/src/Common/filesystemHelpers.cpp +++ b/src/Common/filesystemHelpers.cpp @@ -223,7 +223,7 @@ bool pathStartsWith(const std::filesystem::path & path, const std::filesystem::p return absolute_path.starts_with(absolute_prefix_path); } -bool fileOrSymlinkPathStartsWith(const std::filesystem::path & path, const std::filesystem::path & prefix_path) +static bool fileOrSymlinkPathStartsWith(const std::filesystem::path & path, const std::filesystem::path & prefix_path) { /// Differs from pathStartsWith in how `path` is normalized before comparison. /// Make `path` absolute if it was relative and put it into normalized form: remove diff --git a/src/Common/formatIPv6.cpp b/src/Common/formatIPv6.cpp index 341b3715d30..e20e43d75e1 100644 --- a/src/Common/formatIPv6.cpp +++ b/src/Common/formatIPv6.cpp @@ -40,7 +40,7 @@ namespace detail struct NumToString : Decompose {}; template - consteval std::array, sizeof...(ints)> str_make_array_impl(std::integer_sequence) + static consteval std::array, sizeof...(ints)> str_make_array_impl(std::integer_sequence) { return std::array, sizeof...(ints)> { std::pair {NumToString::value, NumToString::size}... 
}; } @@ -51,7 +51,7 @@ namespace detail * second - size of the string as would returned by strlen() */ template -consteval std::array, N> str_make_array() +static consteval std::array, N> str_make_array() { return detail::str_make_array_impl(std::make_integer_sequence{}); } diff --git a/src/Common/iota.cpp b/src/Common/iota.cpp index 82fe86618c9..603cad7427c 100644 --- a/src/Common/iota.cpp +++ b/src/Common/iota.cpp @@ -6,7 +6,7 @@ namespace DB { MULTITARGET_FUNCTION_AVX2_SSE42( - MULTITARGET_FUNCTION_HEADER(template void NO_INLINE), + MULTITARGET_FUNCTION_HEADER(template static void NO_INLINE), iotaImpl, MULTITARGET_FUNCTION_BODY((T * begin, size_t count, T first_value) /// NOLINT { for (size_t i = 0; i < count; i++) @@ -28,7 +28,7 @@ void iota(T * begin, size_t count, T first_value) } MULTITARGET_FUNCTION_AVX2_SSE42( - MULTITARGET_FUNCTION_HEADER(template void NO_INLINE), + MULTITARGET_FUNCTION_HEADER(template static void NO_INLINE), iotaWithStepImpl, MULTITARGET_FUNCTION_BODY((T * begin, size_t count, T first_value, T step) /// NOLINT { for (size_t i = 0; i < count; i++) diff --git a/src/Compression/CompressionCodecMultiple.cpp b/src/Compression/CompressionCodecMultiple.cpp index 6dc10677a3f..aae52a2d051 100644 --- a/src/Compression/CompressionCodecMultiple.cpp +++ b/src/Compression/CompressionCodecMultiple.cpp @@ -123,8 +123,11 @@ std::vector CompressionCodecMultiple::getCodecsBytesFromData(const char { std::vector result; uint8_t compression_methods_size = source[0]; + result.reserve(compression_methods_size); + for (size_t i = 0; i < compression_methods_size; ++i) result.push_back(source[1 + i]); + return result; } diff --git a/src/Coordination/Changelog.cpp b/src/Coordination/Changelog.cpp index 4f87f43a11f..d23dd30f7ae 100644 --- a/src/Coordination/Changelog.cpp +++ b/src/Coordination/Changelog.cpp @@ -848,7 +848,7 @@ void LogEntryStorage::startCommitLogsPrefetch(uint64_t last_committed_index) con LOG_TRACE(log, "Will prefetch {} commit log entries [{} - {}]", prefetch_to - prefetch_from + 1, prefetch_from, prefetch_to); current_prefetch_info->file_infos = std::move(file_infos); - auto inserted = prefetch_queue.push(current_prefetch_info); + auto inserted = prefetch_queue.push(current_prefetch_info); /// NOLINT(clang-analyzer-deadcode.DeadStores) chassert(inserted); } } diff --git a/src/Coordination/KeeperStorage.cpp b/src/Coordination/KeeperStorage.cpp index 1e1aed52c5a..b132ee010f4 100644 --- a/src/Coordination/KeeperStorage.cpp +++ b/src/Coordination/KeeperStorage.cpp @@ -538,7 +538,7 @@ struct Overloaded : Ts... // explicit deduction guide // https://en.cppreference.com/w/cpp/language/class_template_argument_deduction template -Overloaded(Ts...) -> Overloaded; +Overloaded(Ts...) 
-> Overloaded; /// NOLINT(misc-use-internal-linkage) template std::shared_ptr KeeperStorage::UncommittedState::tryGetNodeFromStorage(StringRef path, bool should_lock_storage) const diff --git a/src/Core/BaseSettingsProgramOptions.h b/src/Core/BaseSettingsProgramOptions.h index f482c08cf13..1f482d7782f 100644 --- a/src/Core/BaseSettingsProgramOptions.h +++ b/src/Core/BaseSettingsProgramOptions.h @@ -14,12 +14,12 @@ void addProgramOptionAsMultitoken(T &cmd_settings, boost::program_options::optio if (field.getTypeName() == "Bool") { options.add(boost::shared_ptr(new boost::program_options::option_description( - name.data(), boost::program_options::value()->multitoken()->composing()->implicit_value(std::vector{"1"}, "1")->notifier(on_program_option), field.getDescription()))); + name.data(), boost::program_options::value()->multitoken()->composing()->implicit_value(std::vector{"1"}, "1")->notifier(on_program_option), field.getDescription()))); /// NOLINT(bugprone-suspicious-stringview-data-usage) } else { options.add(boost::shared_ptr(new boost::program_options::option_description( - name.data(), boost::program_options::value()->multitoken()->composing()->notifier(on_program_option), field.getDescription()))); + name.data(), boost::program_options::value()->multitoken()->composing()->notifier(on_program_option), field.getDescription()))); /// NOLINT(bugprone-suspicious-stringview-data-usage) } } diff --git a/src/Core/Settings.cpp b/src/Core/Settings.cpp index 9f913a4ff9a..f01d0474a87 100644 --- a/src/Core/Settings.cpp +++ b/src/Core/Settings.cpp @@ -6164,7 +6164,7 @@ void SettingsImpl::applyCompatibilitySetting(const String & compatibility_value) namespace Setting { - LIST_OF_SETTINGS(INITIALIZE_SETTING_EXTERN, SKIP_ALIAS) + LIST_OF_SETTINGS(INITIALIZE_SETTING_EXTERN, SKIP_ALIAS) /// NOLINT (misc-use-internal-linkage) } #undef INITIALIZE_SETTING_EXTERN diff --git a/src/Daemon/SentryWriter.cpp b/src/Daemon/SentryWriter.cpp index 154b99ad541..01461711448 100644 --- a/src/Daemon/SentryWriter.cpp +++ b/src/Daemon/SentryWriter.cpp @@ -180,7 +180,7 @@ void SentryWriter::sendError(Type type, int sig_or_error, const std::string & er { int code = sig_or_error; /// Can be only LOGICAL_ERROR, but just in case. - sentry_set_tag("exception", DB::ErrorCodes::getName(code).data()); + sentry_set_tag("exception", DB::ErrorCodes::getName(code).data()); /// NOLINT(bugprone-suspicious-stringview-data-usage) sentry_set_extra("exception_code", sentry_value_new_int32(code)); break; } diff --git a/src/DataTypes/DataTypeArray.cpp b/src/DataTypes/DataTypeArray.cpp index 806a1577a21..63f9064763f 100644 --- a/src/DataTypes/DataTypeArray.cpp +++ b/src/DataTypes/DataTypeArray.cpp @@ -82,7 +82,7 @@ std::unique_ptr DataTypeArray::getDynamicSubcolum nested_data->type = nested_type; nested_data->column = data.column ? 
assert_cast(*data.column).getDataPtr() : nullptr; - auto nested_subcolumn_data = nested_type->getSubcolumnData(subcolumn_name, *nested_data, throw_if_null); + auto nested_subcolumn_data = DB::IDataType::getSubcolumnData(subcolumn_name, *nested_data, throw_if_null); if (!nested_subcolumn_data) return nullptr; diff --git a/src/DataTypes/DataTypeDynamic.cpp b/src/DataTypes/DataTypeDynamic.cpp index fb938f5fbd8..766c89e975c 100644 --- a/src/DataTypes/DataTypeDynamic.cpp +++ b/src/DataTypes/DataTypeDynamic.cpp @@ -128,7 +128,7 @@ std::pair splitSubcolumnName(std::string_vie if (pos == end) return {subcolumn_name, {}}; - return {std::string_view(subcolumn_name.data(), pos), std::string_view(pos + 1, end)}; + return {std::string_view(subcolumn_name.data(), pos), std::string_view(pos + 1, end)}; /// NOLINT(bugprone-suspicious-stringview-data-usage) } } diff --git a/src/DataTypes/DataTypeObject.cpp b/src/DataTypes/DataTypeObject.cpp index 30bf470083d..eaae7ffebca 100644 --- a/src/DataTypes/DataTypeObject.cpp +++ b/src/DataTypes/DataTypeObject.cpp @@ -450,7 +450,7 @@ std::unique_ptr DataTypeObject::getDynamicSubcolu /// Get subcolumn for Dynamic type if needed. if (!path_subcolumn.empty()) { - res = res->type->getSubcolumnData(path_subcolumn, *res, throw_if_null); + res = DB::IDataType::getSubcolumnData(path_subcolumn, *res, throw_if_null); if (!res) return nullptr; } diff --git a/src/DataTypes/Serializations/SerializationIPv4andIPv6.cpp b/src/DataTypes/Serializations/SerializationIPv4andIPv6.cpp index c1beceb4533..deac1f14344 100644 --- a/src/DataTypes/Serializations/SerializationIPv4andIPv6.cpp +++ b/src/DataTypes/Serializations/SerializationIPv4andIPv6.cpp @@ -18,7 +18,7 @@ void SerializationIP::deserializeText(DB::IColumn & column, DB::ReadBuffer assert_cast &>(column).getData().push_back(x); if (whole && !istr.eof()) - throwUnexpectedDataAfterParsedValue(column, istr, settings, TypeName.data()); + throwUnexpectedDataAfterParsedValue(column, istr, settings, {TypeName.data(), TypeName.size()}); } template @@ -79,7 +79,7 @@ void SerializationIP::deserializeTextJSON(DB::IColumn & column, DB::ReadBuf assertChar('"', istr); assert_cast &>(column).getData().push_back(x); if (*istr.position() != '"') - throwUnexpectedDataAfterParsedValue(column, istr, settings, TypeName.data()); + throwUnexpectedDataAfterParsedValue(column, istr, settings, {TypeName.data(), TypeName.size()}); istr.ignore(); } diff --git a/src/Databases/DDLRenamingVisitor.cpp b/src/Databases/DDLRenamingVisitor.cpp index 7556223b30e..06d410bddb6 100644 --- a/src/Databases/DDLRenamingVisitor.cpp +++ b/src/Databases/DDLRenamingVisitor.cpp @@ -353,7 +353,7 @@ void DDLRenamingMap::setNewDatabaseName(const String & old_database_name, const } -const String & DDLRenamingMap::getNewDatabaseName(const String & old_database_name) const +String DDLRenamingMap::getNewDatabaseName(const String & old_database_name) const { auto it = old_to_new_database_names.find(old_database_name); if (it != old_to_new_database_names.end()) diff --git a/src/Databases/DDLRenamingVisitor.h b/src/Databases/DDLRenamingVisitor.h index 44146a8ee6b..bcd655aecb7 100644 --- a/src/Databases/DDLRenamingVisitor.h +++ b/src/Databases/DDLRenamingVisitor.h @@ -27,7 +27,7 @@ public: void setNewDatabaseName(const String & old_database_name, const String & new_database_name); QualifiedTableName getNewTableName(const QualifiedTableName & old_table_name) const; - const String & getNewDatabaseName(const String & old_database_name) const; + String getNewDatabaseName(const String & 
old_database_name) const; private: std::unordered_map old_to_new_table_names; diff --git a/src/Disks/ObjectStorages/DiskObjectStorage.cpp b/src/Disks/ObjectStorages/DiskObjectStorage.cpp index fba45d5a0c9..9bd94a055dd 100644 --- a/src/Disks/ObjectStorages/DiskObjectStorage.cpp +++ b/src/Disks/ObjectStorages/DiskObjectStorage.cpp @@ -749,7 +749,7 @@ void DiskObjectStorage::restoreMetadataIfNeeded( { metadata_helper->restore(config, config_prefix, context); - auto current_schema_version = metadata_helper->readSchemaVersion(object_storage.get(), object_key_prefix); + auto current_schema_version = DB::DiskObjectStorageRemoteMetadataRestoreHelper::readSchemaVersion(object_storage.get(), object_key_prefix); if (current_schema_version < DiskObjectStorageRemoteMetadataRestoreHelper::RESTORABLE_SCHEMA_VERSION) metadata_helper->migrateToRestorableSchema(); diff --git a/src/Functions/FunctionHelpers.cpp b/src/Functions/FunctionHelpers.cpp index d37adee3e94..6c3e1de8d19 100644 --- a/src/Functions/FunctionHelpers.cpp +++ b/src/Functions/FunctionHelpers.cpp @@ -214,7 +214,7 @@ checkAndGetNestedArrayOffset(const IColumn ** columns, size_t num_arguments) else if (*offsets_i != *offsets) throw Exception(ErrorCodes::SIZES_OF_ARRAYS_DONT_MATCH, "Lengths of all arrays passed to aggregate function must be equal."); } - return {nested_columns, offsets->data()}; + return {nested_columns, offsets->data()}; /// NOLINT(clang-analyzer-core.CallAndMessage) } ColumnPtr diff --git a/src/Functions/FunctionsEmbeddedDictionaries.h b/src/Functions/FunctionsEmbeddedDictionaries.h index a89adfefd6b..716e86b5d72 100644 --- a/src/Functions/FunctionsEmbeddedDictionaries.h +++ b/src/Functions/FunctionsEmbeddedDictionaries.h @@ -122,7 +122,7 @@ struct IdentityDictionaryGetter static Dst & get(Src & src, const std::string & key) { if (key.empty()) - return src; + return src; /// NOLINT(bugprone-return-const-ref-from-parameter) throw Exception(ErrorCodes::BAD_ARGUMENTS, "Dictionary doesn't support 'point of view' keys."); } }; diff --git a/src/Functions/FunctionsExternalDictionaries.h b/src/Functions/FunctionsExternalDictionaries.h index b0f54b74091..2fc08097b3d 100644 --- a/src/Functions/FunctionsExternalDictionaries.h +++ b/src/Functions/FunctionsExternalDictionaries.h @@ -1219,7 +1219,7 @@ public: return result_type->createColumn(); auto dictionary = dictionary_helper->getDictionary(arguments[0].column); - const auto & hierarchical_attribute = dictionary_helper->getDictionaryHierarchicalAttribute(dictionary); + const auto & hierarchical_attribute = FunctionDictHelper::getDictionaryHierarchicalAttribute(dictionary); auto key_column = ColumnWithTypeAndName{arguments[1].column->convertToFullColumnIfConst(), arguments[1].type, arguments[1].name}; auto key_column_casted = castColumnAccurate(key_column, removeNullable(hierarchical_attribute.type)); @@ -1371,7 +1371,7 @@ public: } auto dictionary = dictionary_helper->getDictionary(arguments[0].column); - const auto & hierarchical_attribute = dictionary_helper->getDictionaryHierarchicalAttribute(dictionary); + const auto & hierarchical_attribute = FunctionDictHelper::getDictionaryHierarchicalAttribute(dictionary); return std::make_shared(removeNullable(hierarchical_attribute.type)); } diff --git a/src/Functions/GatherUtils/IArraySink.h b/src/Functions/GatherUtils/IArraySink.h index 81ced193e97..1e976d0a2cf 100644 --- a/src/Functions/GatherUtils/IArraySink.h +++ b/src/Functions/GatherUtils/IArraySink.h @@ -25,7 +25,7 @@ struct IArraySink }; template -class ArraySinkImpl : public 
Visitable {}; +class ArraySinkImpl : public Visitable {}; /// NOLINT(bugprone-crtp-constructor-accessibility) } diff --git a/src/Functions/GatherUtils/IArraySource.h b/src/Functions/GatherUtils/IArraySource.h index fc5653ddeac..b5b172e55ce 100644 --- a/src/Functions/GatherUtils/IArraySource.h +++ b/src/Functions/GatherUtils/IArraySource.h @@ -31,7 +31,7 @@ struct IArraySource }; template -class ArraySourceImpl : public Visitable {}; +class ArraySourceImpl : public Visitable {}; /// NOLINT(bugprone-crtp-constructor-accessibility) } diff --git a/src/Functions/GatherUtils/IValueSource.h b/src/Functions/GatherUtils/IValueSource.h index 8278b7ea19b..2f768b45520 100644 --- a/src/Functions/GatherUtils/IValueSource.h +++ b/src/Functions/GatherUtils/IValueSource.h @@ -27,7 +27,7 @@ struct IValueSource }; template -class ValueSourceImpl : public Visitable {}; +class ValueSourceImpl : public Visitable {}; /// NOLINT(bugprone-crtp-constructor-accessibility) } diff --git a/src/Functions/URL/domain.h b/src/Functions/URL/domain.h index 328df76b570..f37b26d5d74 100644 --- a/src/Functions/URL/domain.h +++ b/src/Functions/URL/domain.h @@ -277,7 +277,7 @@ struct ExtractDomain } else { - if (without_www && host.size() > 4 && !strncmp(host.data(), "www.", 4)) + if (without_www && host.size() > 4 && !strncmp(host.data(), "www.", 4)) /// NOLINT(bugprone-suspicious-stringview-data-usage) host = { host.data() + 4, host.size() - 4 }; res_data = host.data(); diff --git a/src/Functions/URL/topLevelDomain.cpp b/src/Functions/URL/topLevelDomain.cpp index b3e88832350..c788dd17ded 100644 --- a/src/Functions/URL/topLevelDomain.cpp +++ b/src/Functions/URL/topLevelDomain.cpp @@ -28,7 +28,7 @@ struct ExtractTopLevelDomain const auto * host_end = host.data() + host.size(); - Pos last_dot = find_last_symbols_or_null<'.'>(host.data(), host_end); + Pos last_dot = find_last_symbols_or_null<'.'>(host.data(), host_end); /// NOLINT(bugprone-suspicious-stringview-data-usage) if (!last_dot) return; diff --git a/src/Functions/extractAllGroups.h b/src/Functions/extractAllGroups.h index 06f03a2c26a..284233975ed 100644 --- a/src/Functions/extractAllGroups.h +++ b/src/Functions/extractAllGroups.h @@ -136,7 +136,7 @@ public: const auto * end = pos + current_row.size(); while (pos < end && regexp->Match({pos, static_cast(end - pos)}, - 0, end - pos, regexp->UNANCHORED, + 0, end - pos, RE2::UNANCHORED, matched_groups.data(), static_cast(matched_groups.size()))) { // 1 is to exclude group #0 which is whole re match. @@ -180,7 +180,7 @@ public: const auto * end = pos + current_row.size; while (pos < end && regexp->Match({pos, static_cast(end - pos)}, - 0, end - pos, regexp->UNANCHORED, matched_groups.data(), + 0, end - pos, RE2::UNANCHORED, matched_groups.data(), static_cast(matched_groups.size()))) { // 1 is to exclude group #0 which is whole re match. diff --git a/src/Functions/isIPAddressContainedIn.cpp b/src/Functions/isIPAddressContainedIn.cpp index f96ee2008ec..829e81ce899 100644 --- a/src/Functions/isIPAddressContainedIn.cpp +++ b/src/Functions/isIPAddressContainedIn.cpp @@ -86,7 +86,7 @@ IPAddressCIDR parseIPWithCIDR(std::string_view cidr_str) auto prefix_str = cidr_str.substr(pos_slash+1); const auto * prefix_str_end = prefix_str.data() + prefix_str.size(); - auto [parse_end, parse_error] = std::from_chars(prefix_str.data(), prefix_str_end, prefix); + auto [parse_end, parse_error] = std::from_chars(prefix_str.data(), prefix_str_end, prefix); /// NOLINT(bugprone-suspicious-stringview-data-usage) uint8_t max_prefix = (addr.asV6() ? 
IPV6_BINARY_LENGTH : IPV4_BINARY_LENGTH) * 8; bool has_error = parse_error != std::errc() || parse_end != prefix_str_end || prefix > max_prefix; if (has_error) diff --git a/src/IO/BufferWithOwnMemory.h b/src/IO/BufferWithOwnMemory.h index 79b1bb67aaa..75b5a653fba 100644 --- a/src/IO/BufferWithOwnMemory.h +++ b/src/IO/BufferWithOwnMemory.h @@ -75,7 +75,7 @@ struct Memory : boost::noncopyable, Allocator size_t size() const { return m_size; } const char & operator[](size_t i) const { return m_data[i]; } - char & operator[](size_t i) { return m_data[i]; } + char & operator[](size_t i) { return m_data[i]; } /// NOLINT(clang-analyzer-core.uninitialized.UndefReturn) const char * data() const { return m_data; } char * data() { return m_data; } diff --git a/src/IO/readDecimalText.h b/src/IO/readDecimalText.h index 0e9a8beae21..baa88191989 100644 --- a/src/IO/readDecimalText.h +++ b/src/IO/readDecimalText.h @@ -189,7 +189,7 @@ inline ReturnType readDecimalText(ReadBuffer & buf, T & x, uint32_t precision, u /// Too many digits after point. Just cut off excessive digits. auto divisor = intExp10OfSize(divisor_exp); assert(divisor > 0); /// This is for Clang Static Analyzer. It is not smart enough to infer it automatically. - x.value /= divisor; + x.value /= divisor; /// NOLINT(clang-analyzer-core.DivideZero) scale = 0; return ReturnType(true); } diff --git a/src/Interpreters/Aggregator.cpp b/src/Interpreters/Aggregator.cpp index 2f797d4810b..856175be083 100644 --- a/src/Interpreters/Aggregator.cpp +++ b/src/Interpreters/Aggregator.cpp @@ -2512,6 +2512,7 @@ void NO_INLINE Aggregator::mergeWithoutKeyDataImpl( { size_t size = non_empty_data.size(); std::vector data_vec; + data_vec.reserve(size); for (size_t result_num = 0; result_num < size; ++result_num) data_vec.emplace_back(non_empty_data[result_num]->without_key + offsets_of_aggregate_states[i]); diff --git a/src/Interpreters/Cluster.cpp b/src/Interpreters/Cluster.cpp index 41382137156..006e3f75937 100644 --- a/src/Interpreters/Cluster.cpp +++ b/src/Interpreters/Cluster.cpp @@ -284,7 +284,7 @@ Cluster::Address Cluster::Address::fromFullString(std::string_view full_string) secure = Protocol::Secure::Enable; } - const char * colon = strchr(full_string.data(), ':'); + const char * colon = strchr(full_string.data(), ':'); /// NOLINT(bugprone-suspicious-stringview-data-usage) if (!user_pw_end || !colon) throw Exception(ErrorCodes::SYNTAX_ERROR, "Incorrect user[:password]@host:port#default_database format {}", full_string); @@ -293,7 +293,7 @@ Cluster::Address Cluster::Address::fromFullString(std::string_view full_string) if (!host_end) throw Exception(ErrorCodes::SYNTAX_ERROR, "Incorrect address '{}', it does not contain port", full_string); - const char * has_db = strchr(full_string.data(), '#'); + const char * has_db = strchr(full_string.data(), '#'); /// NOLINT(bugprone-suspicious-stringview-data-usage) const char * port_end = has_db ? has_db : address_end; Address address; diff --git a/src/Interpreters/DDLWorker.cpp b/src/Interpreters/DDLWorker.cpp index 3079e9eaa8b..b1c7635b62b 100644 --- a/src/Interpreters/DDLWorker.cpp +++ b/src/Interpreters/DDLWorker.cpp @@ -990,7 +990,7 @@ void DDLWorker::cleanupQueue(Int64, const ZooKeeperPtr & zookeeper) /// No one has started to process the entry, so node_path/active and node_path/finished nodes were never created, node_path has no children. /// Entry became outdated, but we cannot remove remove it in a transaction with node_path/finished. 
chassert(res[0]->error == Coordination::Error::ZOK && res[1]->error == Coordination::Error::ZNONODE); - rm_entry_res = zookeeper->tryRemove(node_path); + rm_entry_res = zookeeper->tryRemove(node_path); /// NOLINT(clang-analyzer-deadcode.DeadStores) chassert(rm_entry_res != Coordination::Error::ZNOTEMPTY); continue; } diff --git a/src/Interpreters/EmbeddedDictionaries.cpp b/src/Interpreters/EmbeddedDictionaries.cpp index 1435d16cb07..c369df11c9a 100644 --- a/src/Interpreters/EmbeddedDictionaries.cpp +++ b/src/Interpreters/EmbeddedDictionaries.cpp @@ -72,17 +72,17 @@ bool EmbeddedDictionaries::reloadImpl(const bool throw_on_error, const bool forc bool was_exception = false; - DictionaryReloader reload_regions_hierarchies = [=, this] (const Poco::Util::AbstractConfiguration & config) + DictionaryReloader reload_regions_hierarchies = [=] (const Poco::Util::AbstractConfiguration & config) { - return geo_dictionaries_loader->reloadRegionsHierarchies(config); + return DB::GeoDictionariesLoader::reloadRegionsHierarchies(config); }; if (!reloadDictionary(regions_hierarchies, std::move(reload_regions_hierarchies), throw_on_error, force_reload)) was_exception = true; - DictionaryReloader reload_regions_names = [=, this] (const Poco::Util::AbstractConfiguration & config) + DictionaryReloader reload_regions_names = [=] (const Poco::Util::AbstractConfiguration & config) { - return geo_dictionaries_loader->reloadRegionsNames(config); + return DB::GeoDictionariesLoader::reloadRegionsNames(config); }; if (!reloadDictionary(regions_names, std::move(reload_regions_names), throw_on_error, force_reload)) diff --git a/src/Interpreters/HashJoin/HashJoin.cpp b/src/Interpreters/HashJoin/HashJoin.cpp index 1d0c6f75b8e..219a079425e 100644 --- a/src/Interpreters/HashJoin/HashJoin.cpp +++ b/src/Interpreters/HashJoin/HashJoin.cpp @@ -1005,6 +1005,7 @@ void HashJoin::joinBlock(Block & block, ExtraBlockPtr & not_processed) bool prefer_use_maps_all = table_join->getMixedJoinExpression() != nullptr; { std::vectormaps[0])> *> maps_vector; + maps_vector.reserve(table_join->getClauses().size()); for (size_t i = 0; i < table_join->getClauses().size(); ++i) maps_vector.push_back(&data->maps[i]); diff --git a/src/Parsers/ASTBackupQuery.cpp b/src/Parsers/ASTBackupQuery.cpp index 5a5cb97fa5f..0372d0e16da 100644 --- a/src/Parsers/ASTBackupQuery.cpp +++ b/src/Parsers/ASTBackupQuery.cpp @@ -286,7 +286,7 @@ ASTPtr ASTBackupQuery::getRewrittenASTWithoutOnCluster(const WithoutOnClusterAST auto new_query = std::static_pointer_cast(clone()); new_query->cluster.clear(); new_query->settings = rewriteSettingsWithoutOnCluster(new_query->settings, params); - new_query->setCurrentDatabase(new_query->elements, params.default_database); + ASTBackupQuery::setCurrentDatabase(new_query->elements, params.default_database); return new_query; } diff --git a/src/Parsers/Access/ParserCreateRowPolicyQuery.cpp b/src/Parsers/Access/ParserCreateRowPolicyQuery.cpp index b49e153c5b0..e1c43bc654c 100644 --- a/src/Parsers/Access/ParserCreateRowPolicyQuery.cpp +++ b/src/Parsers/Access/ParserCreateRowPolicyQuery.cpp @@ -98,7 +98,7 @@ namespace for (auto filter_type : collections::range(RowPolicyFilterType::MAX)) { std::string_view command = RowPolicyFilterTypeInfo::get(filter_type).command; - if (ParserKeyword::createDeprecated(command.data()).ignore(pos, expected)) + if (ParserKeyword::createDeprecated({command.data(), command.size()}).ignore(pos, expected)) { res_commands.emplace(command); return true; diff --git a/src/Planner/PlannerActionsVisitor.cpp 
b/src/Planner/PlannerActionsVisitor.cpp index 2cb2a242c35..46b1db3f660 100644 --- a/src/Planner/PlannerActionsVisitor.cpp +++ b/src/Planner/PlannerActionsVisitor.cpp @@ -866,7 +866,7 @@ PlannerActionsVisitorImpl::NodeNameAndNodeMinLevel PlannerActionsVisitorImpl::ma PreparedSets::toString(set_key, set_element_types)); ColumnWithTypeAndName column; - column.name = planner_context->createSetKey(in_first_argument->getResultType(), in_second_argument); + column.name = DB::PlannerContext::createSetKey(in_first_argument->getResultType(), in_second_argument); column.type = std::make_shared(); bool set_is_created = set->get() != nullptr; diff --git a/src/Processors/Chunk.h b/src/Processors/Chunk.h index fd3bae45c69..cc9dbc17f27 100644 --- a/src/Processors/Chunk.h +++ b/src/Processors/Chunk.h @@ -25,10 +25,13 @@ public: template class ChunkInfoCloneable : public ChunkInfo { -public: + friend Derived; + +private: ChunkInfoCloneable() = default; ChunkInfoCloneable(const ChunkInfoCloneable & other) = default; +public: Ptr clone() const override { return std::static_pointer_cast(std::make_shared(*static_cast(this))); diff --git a/src/Processors/Executors/PollingQueue.cpp b/src/Processors/Executors/PollingQueue.cpp index 447c102a195..55abef0b8eb 100644 --- a/src/Processors/Executors/PollingQueue.cpp +++ b/src/Processors/Executors/PollingQueue.cpp @@ -33,9 +33,9 @@ PollingQueue::PollingQueue() PollingQueue::~PollingQueue() { int err; - err = close(pipe_fd[0]); + err = close(pipe_fd[0]); /// NOLINT(clang-analyzer-deadcode.DeadStores) chassert(!err || errno == EINTR); - err = close(pipe_fd[1]); + err = close(pipe_fd[1]); /// NOLINT(clang-analyzer-deadcode.DeadStores) chassert(!err || errno == EINTR); } diff --git a/src/Processors/Merges/Algorithms/Graphite.cpp b/src/Processors/Merges/Algorithms/Graphite.cpp index 460f1bea4b2..ceb86d89500 100644 --- a/src/Processors/Merges/Algorithms/Graphite.cpp +++ b/src/Processors/Merges/Algorithms/Graphite.cpp @@ -287,7 +287,7 @@ std::string buildTaggedRegex(std::string regexp_str) /* remove empty elements */ using namespace std::string_literals; std::erase(tags, ""s); - if (tags[0].find('=') == tags[0].npos) + if (tags[0].find('=') == tags[0].npos) /// NOLINT(readability-static-accessed-through-instance) { if (tags.size() == 1) /* only name */ return "^" + tags[0] + "\\?"; diff --git a/src/Processors/QueryPlan/Optimizations/optimizeUseAggregateProjection.cpp b/src/Processors/QueryPlan/Optimizations/optimizeUseAggregateProjection.cpp index de8c42e0a1c..5427cddac72 100644 --- a/src/Processors/QueryPlan/Optimizations/optimizeUseAggregateProjection.cpp +++ b/src/Processors/QueryPlan/Optimizations/optimizeUseAggregateProjection.cpp @@ -394,7 +394,7 @@ std::optional analyzeAggregateProjection( // LOG_TRACE(getLogger("optimizeUseProjections"), "Folding actions by projection"); - auto proj_dag = query.dag->foldActionsByProjection(new_inputs, query_key_nodes); + auto proj_dag = ActionsDAG::foldActionsByProjection(new_inputs, query_key_nodes); appendAggregateFunctions(proj_dag, aggregates, *matched_aggregates); return proj_dag; } @@ -739,7 +739,7 @@ std::optional optimizeUseAggregateProjections(QueryPlan::Node & node, Qu AggregateDataPtr place = state.data(); agg_count->create(place); SCOPE_EXIT_MEMORY_SAFE(agg_count->destroy(place)); - agg_count->set(place, exact_count); + AggregateFunctionCount::set(place, exact_count); auto column = ColumnAggregateFunction::create(agg_count); column->insertFrom(place); diff --git a/src/Storages/MaterializedView/RefreshTask.cpp 
b/src/Storages/MaterializedView/RefreshTask.cpp index 3b893d4677a..7aa05ede737 100644 --- a/src/Storages/MaterializedView/RefreshTask.cpp +++ b/src/Storages/MaterializedView/RefreshTask.cpp @@ -491,7 +491,7 @@ void RefreshTask::refreshTask() znode.last_attempt_error = error_message; } - bool ok = updateCoordinationState(znode, false, zookeeper, lock); + bool ok = updateCoordinationState(znode, false, zookeeper, lock); /// NOLINT(clang-analyzer-deadcode.DeadStores) chassert(ok); chassert(lock.owns_lock()); diff --git a/src/Storages/MergeTree/IMergeTreeDataPart.cpp b/src/Storages/MergeTree/IMergeTreeDataPart.cpp index 390150e49e5..229c3389753 100644 --- a/src/Storages/MergeTree/IMergeTreeDataPart.cpp +++ b/src/Storages/MergeTree/IMergeTreeDataPart.cpp @@ -1298,7 +1298,7 @@ void IMergeTreeDataPart::appendFilesOfPartitionAndMinMaxIndex(Strings & files) c MergeTreePartition::appendFiles(storage, files); if (!parent_part) - minmax_idx->appendFiles(storage, files); + IMergeTreeDataPart::MinMaxIndex::appendFiles(storage, files); } void IMergeTreeDataPart::loadChecksums(bool require) diff --git a/src/Storages/MergeTree/MergeTreeMarksLoader.cpp b/src/Storages/MergeTree/MergeTreeMarksLoader.cpp index a271af578cc..30fe5080ee5 100644 --- a/src/Storages/MergeTree/MergeTreeMarksLoader.cpp +++ b/src/Storages/MergeTree/MergeTreeMarksLoader.cpp @@ -156,7 +156,7 @@ MarkCache::MappedPtr MergeTreeMarksLoader::loadMarksImpl() if (!index_granularity_info.mark_type.adaptive) { /// Read directly to marks. - chassert(expected_uncompressed_size == plain_marks.size() * sizeof(MarkInCompressedFile)); + chassert(expected_uncompressed_size == plain_marks.size() * sizeof(MarkInCompressedFile)); /// NOLINT(bugprone-sizeof-expression) reader->readStrict(reinterpret_cast(plain_marks.data()), expected_uncompressed_size); if (!reader->eof()) diff --git a/src/Storages/MergeTree/MergeTreeSettings.cpp b/src/Storages/MergeTree/MergeTreeSettings.cpp index 92e8d880417..fd832cd57f8 100644 --- a/src/Storages/MergeTree/MergeTreeSettings.cpp +++ b/src/Storages/MergeTree/MergeTreeSettings.cpp @@ -528,7 +528,7 @@ void MergeTreeColumnSettings::validate(const SettingsChanges & changes) namespace MergeTreeSetting { - LIST_OF_MERGE_TREE_SETTINGS(INITIALIZE_SETTING_EXTERN, SKIP_ALIAS) + LIST_OF_MERGE_TREE_SETTINGS(INITIALIZE_SETTING_EXTERN, SKIP_ALIAS) /// NOLINT(misc-use-internal-linkage) } #undef INITIALIZE_SETTING_EXTERN diff --git a/src/Storages/MergeTree/MergeTreeSplitPrewhereIntoReadSteps.cpp b/src/Storages/MergeTree/MergeTreeSplitPrewhereIntoReadSteps.cpp index 4870152d9ce..f629ea74400 100644 --- a/src/Storages/MergeTree/MergeTreeSplitPrewhereIntoReadSteps.cpp +++ b/src/Storages/MergeTree/MergeTreeSplitPrewhereIntoReadSteps.cpp @@ -158,7 +158,7 @@ const ActionsDAG::Node & addCast( const DataTypePtr & to_type) { if (!node_to_cast.result_type->equals(*to_type)) - return node_to_cast; + return node_to_cast; /// NOLINT(bugprone-return-const-ref-from-parameter) const auto & new_node = dag->addCast(node_to_cast, to_type, {}); return new_node; diff --git a/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp b/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp index 4d5ad806177..48f3c5ea30a 100644 --- a/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp +++ b/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp @@ -151,6 +151,7 @@ public: { String result = "Statistics: "; std::vector stats_by_replica; + stats_by_replica.resize(stats.size()); for (size_t i = 0; i < stats.size(); ++i) 
stats_by_replica.push_back(fmt::format( "replica {}{} - {{requests: {} marks: {} assigned_to_me: {} stolen_by_hash: {} stolen_unassigned: {}}}", diff --git a/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp b/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp index 8342fbed8b9..5014155b5b8 100644 --- a/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp +++ b/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp @@ -425,6 +425,7 @@ void ReplicatedMergeTreeCleanupThread::markLostReplicas(const std::unordered_map throw Exception(ErrorCodes::ALL_REPLICAS_LOST, "All replicas are stale: we won't mark any replica as lost"); std::vector futures; + futures.resize(candidate_lost_replicas.size()); for (size_t i = 0; i < candidate_lost_replicas.size(); ++i) futures.emplace_back(zookeeper->asyncTryMultiNoThrow(requests[i])); diff --git a/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp b/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp index c0e25a54bf3..3fec094e19b 100644 --- a/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp +++ b/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp @@ -118,6 +118,7 @@ std::vector testSelfDeduplicate(std::vector data, std::vector result; + result.resize(col->size()); for (size_t i = 0; i < col->size(); i++) { result.push_back(col->getInt(i)); diff --git a/src/Storages/ObjectStorage/DataLakes/IcebergMetadata.cpp b/src/Storages/ObjectStorage/DataLakes/IcebergMetadata.cpp index f0a80a41d4e..5d8bdc5bc93 100644 --- a/src/Storages/ObjectStorage/DataLakes/IcebergMetadata.cpp +++ b/src/Storages/ObjectStorage/DataLakes/IcebergMetadata.cpp @@ -505,7 +505,7 @@ Strings IcebergMetadata::getDataFiles() const String schema_json_string = String(reinterpret_cast(schema_json.data()), schema_json.size()); Poco::JSON::Parser parser; Poco::Dynamic::Var json = parser.parse(schema_json_string); - Poco::JSON::Object::Ptr schema_object = json.extract(); + const Poco::JSON::Object::Ptr & schema_object = json.extract(); if (!context->getSettingsRef()[Setting::iceberg_engine_ignore_schema_evolution] && schema_object->getValue("schema-id") != current_schema_id) throw Exception( diff --git a/src/Storages/StorageFactory.cpp b/src/Storages/StorageFactory.cpp index 10746a6666c..afc35225b17 100644 --- a/src/Storages/StorageFactory.cpp +++ b/src/Storages/StorageFactory.cpp @@ -233,7 +233,7 @@ StoragePtr StorageFactory::get( { /// Storage creator modified empty arguments list, so we should modify the query assert(storage_def && storage_def->engine && !storage_def->engine->arguments); - storage_def->engine->arguments = std::make_shared(); + storage_def->engine->arguments = std::make_shared(); /// NOLINT(clang-analyzer-core.NullDereference) storage_def->engine->children.push_back(storage_def->engine->arguments); storage_def->engine->arguments->children = empty_engine_args; } diff --git a/src/Storages/StorageMergeTreeIndex.cpp b/src/Storages/StorageMergeTreeIndex.cpp index 35e966b5489..2ced59751f0 100644 --- a/src/Storages/StorageMergeTreeIndex.cpp +++ b/src/Storages/StorageMergeTreeIndex.cpp @@ -163,7 +163,7 @@ private: if (isWidePart(part)) { - if (auto stream_name = part->getStreamNameOrHash(column_name, part->checksums)) + if (auto stream_name = IMergeTreeDataPart::getStreamNameOrHash(column_name, part->checksums)) { col_idx = 0; has_marks_in_part = true; diff --git a/src/Storages/VirtualColumnUtils.cpp b/src/Storages/VirtualColumnUtils.cpp index 706950f32cf..5837ab9be16 100644 --- a/src/Storages/VirtualColumnUtils.cpp +++ b/src/Storages/VirtualColumnUtils.cpp @@ 
-130,7 +130,7 @@ void filterBlockWithExpression(const ExpressionActionsPtr & actions, Block & blo } } -NamesAndTypesList getCommonVirtualsForFileLikeStorage() +static NamesAndTypesList getCommonVirtualsForFileLikeStorage() { return {{"_path", std::make_shared(std::make_shared())}, {"_file", std::make_shared(std::make_shared())}, @@ -144,7 +144,7 @@ NameSet getVirtualNamesForFileLikeStorage() return getCommonVirtualsForFileLikeStorage().getNameSet(); } -std::unordered_map parseHivePartitioningKeysAndValues(const String & path) +static std::unordered_map parseHivePartitioningKeysAndValues(const String & path) { std::string pattern = "([^/]+)=([^/]+)/"; re2::StringPiece input_piece(path); diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index a170dfdd8d1..3f80c7969b4 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -87,7 +87,7 @@ class CI: BuildNames.PACKAGE_RELEASE: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_RELEASE, - compiler="clang-18", + compiler="clang-19", package_type="deb", static_binary_name="amd64", additional_pkgs=True, @@ -96,7 +96,7 @@ class CI: BuildNames.PACKAGE_AARCH64: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_AARCH64, - compiler="clang-18-aarch64", + compiler="clang-19-aarch64", package_type="deb", static_binary_name="aarch64", additional_pkgs=True, @@ -115,7 +115,7 @@ class CI: BuildNames.PACKAGE_ASAN: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_ASAN, - compiler="clang-18", + compiler="clang-19", sanitizer="address", package_type="deb", ), @@ -123,7 +123,7 @@ class CI: BuildNames.PACKAGE_UBSAN: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_UBSAN, - compiler="clang-18", + compiler="clang-19", sanitizer="undefined", package_type="deb", ), @@ -131,7 +131,7 @@ class CI: BuildNames.PACKAGE_TSAN: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_TSAN, - compiler="clang-18", + compiler="clang-19", sanitizer="thread", package_type="deb", ), @@ -139,7 +139,7 @@ class CI: BuildNames.PACKAGE_MSAN: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_MSAN, - compiler="clang-18", + compiler="clang-19", sanitizer="memory", package_type="deb", ), @@ -147,7 +147,7 @@ class CI: BuildNames.PACKAGE_DEBUG: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_DEBUG, - compiler="clang-18", + compiler="clang-19", debug_build=True, package_type="deb", sparse_checkout=True, # Check that it works with at least one build, see also update-submodules.sh @@ -156,7 +156,7 @@ class CI: BuildNames.PACKAGE_RELEASE_COVERAGE: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_RELEASE_COVERAGE, - compiler="clang-18", + compiler="clang-19", coverage=True, package_type="deb", ), @@ -164,14 +164,14 @@ class CI: BuildNames.BINARY_RELEASE: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_RELEASE, - compiler="clang-18", + compiler="clang-19", package_type="binary", ), ), BuildNames.BINARY_TIDY: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_TIDY, - compiler="clang-18", + compiler="clang-19", debug_build=True, package_type="binary", static_binary_name="debug-amd64", @@ -183,7 +183,7 @@ class CI: BuildNames.BINARY_DARWIN: CommonJobConfigs.BUILD.with_properties( 
build_config=BuildConfig( name=BuildNames.BINARY_DARWIN, - compiler="clang-18-darwin", + compiler="clang-19-darwin", package_type="binary", static_binary_name="macos", ), @@ -191,14 +191,14 @@ class CI: BuildNames.BINARY_AARCH64: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_AARCH64, - compiler="clang-18-aarch64", + compiler="clang-19-aarch64", package_type="binary", ), ), BuildNames.BINARY_AARCH64_V80COMPAT: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_AARCH64_V80COMPAT, - compiler="clang-18-aarch64-v80compat", + compiler="clang-19-aarch64-v80compat", package_type="binary", static_binary_name="aarch64v80compat", comment="For ARMv8.1 and older", @@ -207,7 +207,7 @@ class CI: BuildNames.BINARY_FREEBSD: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_FREEBSD, - compiler="clang-18-freebsd", + compiler="clang-19-freebsd", package_type="binary", static_binary_name="freebsd", ), @@ -215,7 +215,7 @@ class CI: BuildNames.BINARY_DARWIN_AARCH64: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_DARWIN_AARCH64, - compiler="clang-18-darwin-aarch64", + compiler="clang-19-darwin-aarch64", package_type="binary", static_binary_name="macos-aarch64", ), @@ -223,7 +223,7 @@ class CI: BuildNames.BINARY_PPC64LE: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_PPC64LE, - compiler="clang-18-ppc64le", + compiler="clang-19-ppc64le", package_type="binary", static_binary_name="powerpc64le", ), @@ -231,7 +231,7 @@ class CI: BuildNames.BINARY_AMD64_COMPAT: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_AMD64_COMPAT, - compiler="clang-18-amd64-compat", + compiler="clang-19-amd64-compat", package_type="binary", static_binary_name="amd64compat", comment="SSE2-only build", @@ -240,7 +240,7 @@ class CI: BuildNames.BINARY_AMD64_MUSL: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_AMD64_MUSL, - compiler="clang-18-amd64-musl", + compiler="clang-19-amd64-musl", package_type="binary", static_binary_name="amd64musl", comment="Build with Musl", @@ -249,7 +249,7 @@ class CI: BuildNames.BINARY_RISCV64: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_RISCV64, - compiler="clang-18-riscv64", + compiler="clang-19-riscv64", package_type="binary", static_binary_name="riscv64", ), @@ -257,7 +257,7 @@ class CI: BuildNames.BINARY_S390X: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_S390X, - compiler="clang-18-s390x", + compiler="clang-19-s390x", package_type="binary", static_binary_name="s390x", ), @@ -265,7 +265,7 @@ class CI: BuildNames.BINARY_LOONGARCH64: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.BINARY_LOONGARCH64, - compiler="clang-18-loongarch64", + compiler="clang-19-loongarch64", package_type="binary", static_binary_name="loongarch64", ), @@ -273,7 +273,7 @@ class CI: BuildNames.FUZZERS: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.FUZZERS, - compiler="clang-18", + compiler="clang-19", package_type="fuzzers", ), run_by_labels=[Tags.libFuzzer], diff --git a/tests/docker_scripts/fasttest_runner.sh b/tests/docker_scripts/fasttest_runner.sh index b8b72aaf866..2b0e5428e85 100755 --- a/tests/docker_scripts/fasttest_runner.sh +++ b/tests/docker_scripts/fasttest_runner.sh @@ -9,7 +9,7 @@ trap 
'kill $(jobs -pr) ||:' EXIT stage=${stage:-} # Compiler version, normally set by Dockerfile -export LLVM_VERSION=${LLVM_VERSION:-18} +export LLVM_VERSION=${LLVM_VERSION:-19} # A variable to pass additional flags to CMake. # Here we explicitly default it to nothing so that bash doesn't complain about From fd5cc97aad58043c6e7ff0ce306721f7119637ee Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Tue, 15 Oct 2024 19:33:29 +0200 Subject: [PATCH 06/31] Remove useless requirement --- docker/test/fasttest/requirements.txt | 1 - docker/test/fuzzer/requirements.txt | 1 - docker/test/integration/base/requirements.txt | 1 - docker/test/libfuzzer/requirements.txt | 1 - docker/test/performance-comparison/requirements.txt | 1 - docker/test/sqllogic/requirements.txt | 1 - docker/test/sqltest/requirements.txt | 1 - docker/test/stateless/requirements.txt | 1 - 8 files changed, 8 deletions(-) diff --git a/docker/test/fasttest/requirements.txt b/docker/test/fasttest/requirements.txt index 6fbcf7cccd4..1f8053d07fb 100644 --- a/docker/test/fasttest/requirements.txt +++ b/docker/test/fasttest/requirements.txt @@ -27,7 +27,6 @@ pandas==1.5.3 pip==24.1.1 pipdeptree==2.23.0 pyparsing==2.4.7 -# python-apt==2.4.0+ubuntu3 python-dateutil==2.9.0.post0 pytz==2024.1 requests==2.32.3 diff --git a/docker/test/fuzzer/requirements.txt b/docker/test/fuzzer/requirements.txt index e51da509b27..2e0b7a6c4c3 100644 --- a/docker/test/fuzzer/requirements.txt +++ b/docker/test/fuzzer/requirements.txt @@ -18,7 +18,6 @@ pip==24.1.1 pipdeptree==2.23.0 PyJWT==2.3.0 pyparsing==2.4.7 -# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/integration/base/requirements.txt b/docker/test/integration/base/requirements.txt index a8ef35cd13f..99717f467a8 100644 --- a/docker/test/integration/base/requirements.txt +++ b/docker/test/integration/base/requirements.txt @@ -17,7 +17,6 @@ pipdeptree==2.23.0 pycurl==7.45.3 PyJWT==2.3.0 pyparsing==2.4.7 -# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/libfuzzer/requirements.txt b/docker/test/libfuzzer/requirements.txt index e51da509b27..2e0b7a6c4c3 100644 --- a/docker/test/libfuzzer/requirements.txt +++ b/docker/test/libfuzzer/requirements.txt @@ -18,7 +18,6 @@ pip==24.1.1 pipdeptree==2.23.0 PyJWT==2.3.0 pyparsing==2.4.7 -# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/performance-comparison/requirements.txt b/docker/test/performance-comparison/requirements.txt index 2db604d6829..894a9a58f2a 100644 --- a/docker/test/performance-comparison/requirements.txt +++ b/docker/test/performance-comparison/requirements.txt @@ -19,7 +19,6 @@ pipdeptree==2.23.0 Pygments==2.11.2 PyJWT==2.3.0 pyparsing==2.4.7 -# python-apt==2.4.0+ubuntu3 pytz==2023.4 PyYAML==6.0.1 scipy==1.12.0 diff --git a/docker/test/sqllogic/requirements.txt b/docker/test/sqllogic/requirements.txt index d5091aaa01b..bb2eabc8eb2 100644 --- a/docker/test/sqllogic/requirements.txt +++ b/docker/test/sqllogic/requirements.txt @@ -20,7 +20,6 @@ pipdeptree==2.23.0 PyJWT==2.3.0 pyodbc==5.1.0 pyparsing==2.4.7 -# python-apt==2.4.0+ubuntu3 SecretStorage==3.3.1 setuptools==59.6.0 six==1.16.0 diff --git a/docker/test/sqltest/requirements.txt b/docker/test/sqltest/requirements.txt index e2fe5b34463..13ab18a695a 100644 --- a/docker/test/sqltest/requirements.txt +++ b/docker/test/sqltest/requirements.txt @@ -17,7 +17,6 @@ pip==24.1.1 pipdeptree==2.23.0 PyJWT==2.3.0 pyparsing==2.4.7 -# 
python-apt==2.4.0+ubuntu3 pytz==2024.1 PyYAML==6.0.1 SecretStorage==3.3.1 diff --git a/docker/test/stateless/requirements.txt b/docker/test/stateless/requirements.txt index af653835614..cae5b805e1b 100644 --- a/docker/test/stateless/requirements.txt +++ b/docker/test/stateless/requirements.txt @@ -34,7 +34,6 @@ pyarrow==15.0.0 pyasn1==0.4.8 PyJWT==2.3.0 pyparsing==2.4.7 -# python-apt==2.4.0+ubuntu3 python-dateutil==2.8.1 pytz==2024.1 PyYAML==6.0.1 From 60d49d769c0e4149048b72c5f759ba83465f58d5 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Tue, 15 Oct 2024 21:47:58 +0200 Subject: [PATCH 07/31] Fix .resize() -> .reserve() --- .../MergeTree/ParallelReplicasReadingCoordinator.cpp | 2 +- src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp | 3 ++- src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp | 5 ++++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp b/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp index 48f3c5ea30a..7d62d92ae6c 100644 --- a/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp +++ b/src/Storages/MergeTree/ParallelReplicasReadingCoordinator.cpp @@ -151,7 +151,7 @@ public: { String result = "Statistics: "; std::vector stats_by_replica; - stats_by_replica.resize(stats.size()); + stats_by_replica.reserve(stats.size()); for (size_t i = 0; i < stats.size(); ++i) stats_by_replica.push_back(fmt::format( "replica {}{} - {{requests: {} marks: {} assigned_to_me: {} stolen_by_hash: {} stolen_unassigned: {}}}", diff --git a/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp b/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp index 5014155b5b8..ad03995d273 100644 --- a/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp +++ b/src/Storages/MergeTree/ReplicatedMergeTreeCleanupThread.cpp @@ -425,7 +425,8 @@ void ReplicatedMergeTreeCleanupThread::markLostReplicas(const std::unordered_map throw Exception(ErrorCodes::ALL_REPLICAS_LOST, "All replicas are stale: we won't mark any replica as lost"); std::vector futures; - futures.resize(candidate_lost_replicas.size()); + futures.reserve(candidate_lost_replicas.size()); + for (size_t i = 0; i < candidate_lost_replicas.size(); ++i) futures.emplace_back(zookeeper->asyncTryMultiNoThrow(requests[i])); diff --git a/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp b/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp index 3fec094e19b..bb168893a83 100644 --- a/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp +++ b/src/Storages/MergeTree/ReplicatedMergeTreeSink.cpp @@ -117,12 +117,15 @@ std::vector testSelfDeduplicate(std::vector data, std::vector result; - result.resize(col->size()); + result.reserve(col->size()); + for (size_t i = 0; i < col->size(); i++) { result.push_back(col->getInt(i)); } + return result; } From 43cf88a86e01174920e7335b3c45e05638470246 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 16 Oct 2024 01:34:37 +0200 Subject: [PATCH 08/31] Fix tidy errors --- src/Common/IntervalTree.h | 2 +- src/Common/examples/array_cache.cpp | 1 + utils/memcpy-bench/memcpy-bench.cpp | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/Common/IntervalTree.h b/src/Common/IntervalTree.h index db7f5238921..1c24098aadf 100644 --- a/src/Common/IntervalTree.h +++ b/src/Common/IntervalTree.h @@ -633,7 +633,7 @@ private: static const Interval & getInterval(const IntervalWithValue & interval_with_value) { if constexpr (is_empty_value) - return interval_with_value; + return 
interval_with_value; /// NOLINT(bugprone-return-const-ref-from-parameter) else return interval_with_value.first; } diff --git a/src/Common/examples/array_cache.cpp b/src/Common/examples/array_cache.cpp index c1267d3c9cf..227f14b3bfe 100644 --- a/src/Common/examples/array_cache.cpp +++ b/src/Common/examples/array_cache.cpp @@ -54,6 +54,7 @@ int main(int argc, char ** argv) Cache cache(cache_size); std::vector threads; + threads.reserve(num_threads); for (size_t i = 0; i < num_threads; ++i) { threads.emplace_back([&] diff --git a/utils/memcpy-bench/memcpy-bench.cpp b/utils/memcpy-bench/memcpy-bench.cpp index bb571200d07..6e862daf4ce 100644 --- a/utils/memcpy-bench/memcpy-bench.cpp +++ b/utils/memcpy-bench/memcpy-bench.cpp @@ -70,7 +70,7 @@ uint64_t test(uint8_t * dst, uint8_t * src, size_t size, size_t iterations, size iteration % 2 ? &dst[begin] : &src[begin], end - begin, [rng = RNG(), &generator]() mutable { return generator(rng); }, - std::forward(impl)); + std::forward(impl)); /// NOLINT(bugprone-use-after-move) } }); } From e88fe37d9205edaeeccffd5947226cc092d20568 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 16 Oct 2024 18:13:53 +0200 Subject: [PATCH 09/31] Fix freebsd build --- src/Common/StackTrace.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Common/StackTrace.cpp b/src/Common/StackTrace.cpp index f78bacf3e1d..78f5c9fee01 100644 --- a/src/Common/StackTrace.cpp +++ b/src/Common/StackTrace.cpp @@ -389,7 +389,7 @@ constexpr std::pair replacements[] // Demangle @c symbol_name if it's not from __functional header (as such functions don't provide any useful // information but pollute stack traces). // Replace parts from @c replacements with shorter aliases -static String demangleAndCollapseNames(std::optional file, const char * const symbol_name) +String demangleAndCollapseNames(std::optional file, const char * const symbol_name) { if (!symbol_name) return "?"; From 4d0bc462a8ecb187039fd548f4179d2d04d40484 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 16 Oct 2024 18:18:52 +0200 Subject: [PATCH 10/31] Another batch of clang-tidy fixes --- src/Columns/tests/gtest_column_dump_structure.cpp | 1 + src/Common/ZooKeeper/examples/zkutil_test_async.cpp | 2 ++ src/Common/tests/gtest_log.cpp | 1 + src/IO/tests/gtest_DateTime64_parsing_and_writing.cpp | 2 +- src/IO/tests/gtest_file_encryption.cpp | 2 +- src/Interpreters/tests/gtest_page_cache.cpp | 3 +++ src/Storages/MergeTree/tests/gtest_async_inserts.cpp | 2 +- utils/memcpy-bench/memcpy-bench.cpp | 2 +- 8 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/Columns/tests/gtest_column_dump_structure.cpp b/src/Columns/tests/gtest_column_dump_structure.cpp index d9647147157..209a1703b88 100644 --- a/src/Columns/tests/gtest_column_dump_structure.cpp +++ b/src/Columns/tests/gtest_column_dump_structure.cpp @@ -13,6 +13,7 @@ TEST(IColumn, dumpStructure) String expected_structure = "LowCardinality(size = 0, UInt8(size = 0), Unique(size = 1, String(size = 1)))"; std::vector threads; + threads.reserve(6); for (size_t i = 0; i < 6; ++i) { threads.emplace_back([&] diff --git a/src/Common/ZooKeeper/examples/zkutil_test_async.cpp b/src/Common/ZooKeeper/examples/zkutil_test_async.cpp index 7a9498542d2..f056c9b97fb 100644 --- a/src/Common/ZooKeeper/examples/zkutil_test_async.cpp +++ b/src/Common/ZooKeeper/examples/zkutil_test_async.cpp @@ -17,6 +17,8 @@ try size_t num_threads = DB::parse(argv[1]); std::vector threads; + threads.reserve(num_threads); + for (size_t i = 0; i < num_threads; ++i) 
{ threads.emplace_back([&] diff --git a/src/Common/tests/gtest_log.cpp b/src/Common/tests/gtest_log.cpp index 6d2bd56ad77..d531ed2b124 100644 --- a/src/Common/tests/gtest_log.cpp +++ b/src/Common/tests/gtest_log.cpp @@ -150,6 +150,7 @@ TEST(Logger, SharedLoggersThreadSafety) size_t loggers_size_before = names.size(); std::vector threads; + threads.reserve(threads_count); for (size_t thread_index = 0; thread_index < threads_count; ++thread_index) { diff --git a/src/IO/tests/gtest_DateTime64_parsing_and_writing.cpp b/src/IO/tests/gtest_DateTime64_parsing_and_writing.cpp index 83c20701353..9c010a6f908 100644 --- a/src/IO/tests/gtest_DateTime64_parsing_and_writing.cpp +++ b/src/IO/tests/gtest_DateTime64_parsing_and_writing.cpp @@ -63,7 +63,7 @@ TEST_P(DateTime64StringWriteTest, WriteText) EXPECT_NO_THROW(writeDateTimeText(param.dt64, param.scale, write_buffer, param.timezone)); write_buffer.finalize(); - EXPECT_STREQ(param.string.data(), actual_string.data()); + EXPECT_STREQ(param.string.data(), actual_string.data()); /// NOLINT(bugprone-suspicious-stringview-data-usage) } TEST_P(DateTime64StringParseBestEffortTest, parse) diff --git a/src/IO/tests/gtest_file_encryption.cpp b/src/IO/tests/gtest_file_encryption.cpp index 2b3d7ce81c5..18fe7a27a97 100644 --- a/src/IO/tests/gtest_file_encryption.cpp +++ b/src/IO/tests/gtest_file_encryption.cpp @@ -117,7 +117,7 @@ TEST_P(FileEncryptionCipherTest, Encryption) { WriteBufferFromOwnString buf; encryptor.setOffset(base_offset); - encryptor.encrypt(input.data(), i, buf); + encryptor.encrypt(input.data(), i, buf); /// NOLINT(bugprone-suspicious-stringview-data-usage) ASSERT_EQ(expected.substr(0, i), buf.str()); } } diff --git a/src/Interpreters/tests/gtest_page_cache.cpp b/src/Interpreters/tests/gtest_page_cache.cpp index 30fa3b921c9..a81ef5e7384 100644 --- a/src/Interpreters/tests/gtest_page_cache.cpp +++ b/src/Interpreters/tests/gtest_page_cache.cpp @@ -146,6 +146,7 @@ TEST(PageCache, DISABLED_Stress) auto start_time = std::chrono::steady_clock::now(); std::vector threads; + threads.reserve(num_threads); for (size_t i = 0; i < num_threads; ++i) threads.emplace_back(thread_func); @@ -247,6 +248,8 @@ TEST(PageCache, DISABLED_HitsBench) auto start_time = std::chrono::steady_clock::now(); std::vector threads; + threads.reserve(num_threads); + for (size_t i = 0; i < num_threads; ++i) threads.emplace_back(thread_func); diff --git a/src/Storages/MergeTree/tests/gtest_async_inserts.cpp b/src/Storages/MergeTree/tests/gtest_async_inserts.cpp index 2d8cd0acc3e..ae96b916000 100644 --- a/src/Storages/MergeTree/tests/gtest_async_inserts.cpp +++ b/src/Storages/MergeTree/tests/gtest_async_inserts.cpp @@ -28,7 +28,7 @@ TEST(AsyncInsertsTest, testScatterOffsetsBySelector) ASSERT_EQ(results.size(), expected.size()); for (size_t i = 0; i < results.size(); i++) { - auto result = results[i]; + const auto & result = results[i]; auto expect = expected[i]; ASSERT_EQ(result->offsets.size(), expect.size()); ASSERT_EQ(result->tokens.size(), expect.size()); diff --git a/utils/memcpy-bench/memcpy-bench.cpp b/utils/memcpy-bench/memcpy-bench.cpp index 6e862daf4ce..3025ba6e854 100644 --- a/utils/memcpy-bench/memcpy-bench.cpp +++ b/utils/memcpy-bench/memcpy-bench.cpp @@ -70,7 +70,7 @@ uint64_t test(uint8_t * dst, uint8_t * src, size_t size, size_t iterations, size iteration % 2 ? 
&dst[begin] : &src[begin], end - begin, [rng = RNG(), &generator]() mutable { return generator(rng); }, - std::forward(impl)); /// NOLINT(bugprone-use-after-move) + std::forward(impl)); /// NOLINT(bugprone-use-after-move,hicpp-invalid-access-moved) } }); } From 097792e9dc710920b1bbb2ec69fb9307b6d868d3 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 16 Oct 2024 20:24:45 +0200 Subject: [PATCH 11/31] Poke CI From d80735a6974761d9b124221ca99a191180022e66 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 16 Oct 2024 20:55:30 +0200 Subject: [PATCH 12/31] Poke CI From 0bf406d17f35e00c11d5a896f14e15f63273f4ac Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Thu, 17 Oct 2024 02:26:36 +0200 Subject: [PATCH 13/31] Try to use lld for riscv build --- cmake/linux/toolchain-riscv64.cmake | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/cmake/linux/toolchain-riscv64.cmake b/cmake/linux/toolchain-riscv64.cmake index ae5a38f08eb..41e6cdb05a2 100644 --- a/cmake/linux/toolchain-riscv64.cmake +++ b/cmake/linux/toolchain-riscv64.cmake @@ -17,9 +17,4 @@ set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}") -set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=bfd") - -# Currently, lld does not work with the error: -# ld.lld: error: section size decrease is too large -# But GNU BinUtils work. -set (LINKER_NAME "riscv64-linux-gnu-ld.bfd" CACHE STRING "Linker name" FORCE) +set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld") From b0fc8549cc8805875dc190d2485def86afc3f60c Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Thu, 17 Oct 2024 03:24:19 +0200 Subject: [PATCH 14/31] Another batch of clang-tidy fixes --- src/IO/tests/gtest_file_encryption.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/IO/tests/gtest_file_encryption.cpp b/src/IO/tests/gtest_file_encryption.cpp index 18fe7a27a97..47040a749d1 100644 --- a/src/IO/tests/gtest_file_encryption.cpp +++ b/src/IO/tests/gtest_file_encryption.cpp @@ -151,7 +151,7 @@ TEST_P(FileEncryptionCipherTest, Decryption) for (size_t i = 0; i <= expected.size(); ++i) { encryptor.setOffset(base_offset); - encryptor.decrypt(input.data(), i, buf.data()); + encryptor.decrypt(input.data(), i, buf.data()); /// NOINT(bugprone-suspicious-stringview-data-usage) ASSERT_EQ(expected.substr(0, i), buf.substr(0, i)); } } From 2ed06b791b71ae21165474194fc0d717e3be1e8f Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Thu, 17 Oct 2024 18:25:01 +0200 Subject: [PATCH 15/31] Typo --- src/IO/tests/gtest_file_encryption.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/IO/tests/gtest_file_encryption.cpp b/src/IO/tests/gtest_file_encryption.cpp index 47040a749d1..28f427c3a31 100644 --- a/src/IO/tests/gtest_file_encryption.cpp +++ b/src/IO/tests/gtest_file_encryption.cpp @@ -151,7 +151,7 @@ TEST_P(FileEncryptionCipherTest, Decryption) for (size_t i = 0; i <= expected.size(); ++i) { encryptor.setOffset(base_offset); - encryptor.decrypt(input.data(), i, buf.data()); /// NOINT(bugprone-suspicious-stringview-data-usage) + encryptor.decrypt(input.data(), i, buf.data()); /// NOLINT(bugprone-suspicious-stringview-data-usage) ASSERT_EQ(expected.substr(0, i), buf.substr(0, i)); } } From b4bdf5bbcfa09baaf81637b35ca7a35a51f6d44b Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Thu, 17 Oct 
2024 23:24:13 +0200 Subject: [PATCH 16/31] Typos --- src/Daemon/BaseDaemon.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index 8a8dd3c759c..7929f08fd58 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -630,8 +630,8 @@ void BaseDaemon::setupWatchdog() logger().setChannel(log); } - /// Cuncurrent writing logs to the same file from two threads is questionable on its own, - /// but rotating them from two threads is disastrous. + /// Concurrent writing logs to the same file from two threads is questionable on its own, + /// but rotating them from two threads is disastrous. if (auto * channel = dynamic_cast(logger().getChannel())) { channel->setChannelProperty("log", Poco::FileChannel::PROP_ROTATION, "never"); From 21be2d60ba9b0e951adb4510e0f57bffa2464908 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Fri, 18 Oct 2024 03:09:47 +0200 Subject: [PATCH 17/31] Remove SIGPIPE from deadly signals --- src/Common/SignalHandlers.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Common/SignalHandlers.cpp b/src/Common/SignalHandlers.cpp index 08261fb1cc1..8c623786f41 100644 --- a/src/Common/SignalHandlers.cpp +++ b/src/Common/SignalHandlers.cpp @@ -632,7 +632,7 @@ void HandledSignals::setupCommonDeadlySignalHandlers() { /// SIGTSTP is added for debugging purposes. To output a stack trace of any running thread at anytime. /// NOTE: that it is also used by clickhouse-test wrapper - addSignalHandler({SIGABRT, SIGSEGV, SIGILL, SIGBUS, SIGSYS, SIGFPE, SIGPIPE, SIGTSTP, SIGTRAP}, signalHandler, true); + addSignalHandler({SIGABRT, SIGSEGV, SIGILL, SIGBUS, SIGSYS, SIGFPE, SIGTSTP, SIGTRAP}, signalHandler, true); #if defined(SANITIZER) __sanitizer_set_death_callback(sanitizerDeathCallback); From e6522772792bebab66787b680b2d4efa0507ff0d Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Tue, 19 Nov 2024 16:53:10 +0100 Subject: [PATCH 18/31] Fix CI definition --- tests/ci/ci_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 3f80c7969b4..1f4448b47d9 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -106,7 +106,7 @@ class CI: BuildNames.PACKAGE_AARCH64_ASAN: CommonJobConfigs.BUILD.with_properties( build_config=BuildConfig( name=BuildNames.PACKAGE_AARCH64_ASAN, - compiler="clang-18-aarch64", + compiler="clang-19-aarch64", sanitizer="address", package_type="deb", ), From 7db373a6a5a520ddc3ad1a9ee3359e880ae1838b Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 20 Nov 2024 14:10:42 +0100 Subject: [PATCH 19/31] Fix tidy --- ci/docker/fasttest/Dockerfile | 2 +- src/Coordination/KeeperServer.cpp | 2 +- src/Interpreters/FillingRow.cpp | 8 ++++---- src/Interpreters/HashJoin/HashJoin.cpp | 2 ++ .../MergeTree/MergeSelectors/SimpleMergeSelector.cpp | 6 ++++-- 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/ci/docker/fasttest/Dockerfile b/ci/docker/fasttest/Dockerfile index 66e48b163b8..7920b986d15 100644 --- a/ci/docker/fasttest/Dockerfile +++ b/ci/docker/fasttest/Dockerfile @@ -5,7 +5,7 @@ FROM ubuntu:22.04 ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=18 +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 RUN apt-get update \ && apt-get install \ diff --git a/src/Coordination/KeeperServer.cpp b/src/Coordination/KeeperServer.cpp index 
f5f11e10a84..adad68da728 100644 --- a/src/Coordination/KeeperServer.cpp +++ b/src/Coordination/KeeperServer.cpp @@ -531,7 +531,7 @@ void KeeperServer::launchRaftServer(const Poco::Util::AbstractConfiguration & co nuraft::raft_server::limits raft_limits; raft_limits.reconnect_limit_ = getValueOrMaxInt32AndLogWarning(coordination_settings[CoordinationSetting::raft_limits_reconnect_limit], "raft_limits_reconnect_limit", log); raft_limits.response_limit_ = getValueOrMaxInt32AndLogWarning(coordination_settings[CoordinationSetting::raft_limits_response_limit], "response_limit", log); - raft_instance->set_raft_limits(raft_limits); + KeeperRaftServer::set_raft_limits(raft_limits); raft_instance->start_server(init_options.skip_initial_election_timeout_); diff --git a/src/Interpreters/FillingRow.cpp b/src/Interpreters/FillingRow.cpp index 384ad669206..95889c4f9b3 100644 --- a/src/Interpreters/FillingRow.cpp +++ b/src/Interpreters/FillingRow.cpp @@ -125,15 +125,15 @@ bool FillingRow::isConstraintsSatisfied(size_t pos) const static const Field & findBorder(const Field & constraint, const Field & next_original, int direction) { if (constraint.isNull()) - return next_original; + return next_original; /// NOLINT(bugprone-return-const-ref-from-parameter) if (next_original.isNull()) - return constraint; + return constraint; /// NOLINT(bugprone-return-const-ref-from-parameter) if (less(constraint, next_original, direction)) - return constraint; + return constraint; /// NOLINT(bugprone-return-const-ref-from-parameter) - return next_original; + return next_original; /// NOLINT(bugprone-return-const-ref-from-parameter) } bool FillingRow::next(const FillingRow & next_original_row, bool& value_changed) diff --git a/src/Interpreters/HashJoin/HashJoin.cpp b/src/Interpreters/HashJoin/HashJoin.cpp index 219a079425e..21885d4fab6 100644 --- a/src/Interpreters/HashJoin/HashJoin.cpp +++ b/src/Interpreters/HashJoin/HashJoin.cpp @@ -1069,6 +1069,8 @@ void HashJoin::joinBlock(ScatteredBlock & block, ScatteredBlock & remaining_bloc } std::vectormaps[0])> *> maps_vector; + maps_vector.reserve(table_join->getClauses().size()); + for (size_t i = 0; i < table_join->getClauses().size(); ++i) maps_vector.push_back(&data->maps[i]); diff --git a/src/Storages/MergeTree/MergeSelectors/SimpleMergeSelector.cpp b/src/Storages/MergeTree/MergeSelectors/SimpleMergeSelector.cpp index 4f786215cbe..d7e47fb860b 100644 --- a/src/Storages/MergeTree/MergeSelectors/SimpleMergeSelector.cpp +++ b/src/Storages/MergeTree/MergeSelectors/SimpleMergeSelector.cpp @@ -166,11 +166,13 @@ size_t calculateRangeWithStochasticSliding(size_t parts_count, size_t parts_thre { auto mean = static_cast(parts_count); std::normal_distribution distribution{mean, mean / 4}; + size_t right_boundary = static_cast(distribution(thread_local_rng)); if (right_boundary > parts_count) right_boundary = 2 * parts_count - right_boundary; - if (right_boundary < parts_threshold) - right_boundary = parts_threshold; + + right_boundary = std::max(right_boundary, parts_threshold); + return right_boundary - parts_threshold; } From b8a525f61f6a67ae1a281fdfb1fddb01faf2cefe Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 20 Nov 2024 17:15:57 +0100 Subject: [PATCH 20/31] Fix LLVM APT version --- ci/docker/fasttest/Dockerfile | 8 ++------ docker/test/fasttest/Dockerfile | 4 ---- docker/test/util/Dockerfile | 4 ++-- 3 files changed, 4 insertions(+), 12 deletions(-) diff --git a/ci/docker/fasttest/Dockerfile b/ci/docker/fasttest/Dockerfile index 7920b986d15..398229c0f88 100644 --- 
a/ci/docker/fasttest/Dockerfile +++ b/ci/docker/fasttest/Dockerfile @@ -5,7 +5,7 @@ FROM ubuntu:22.04 ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 LLVM_APT_VERSION="1:19.1.4~++20241119083634+aadaa00de76e-1~exp1~20241119083649.64" RUN apt-get update \ && apt-get install \ @@ -26,7 +26,7 @@ RUN apt-get update \ && echo "deb https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \ /etc/apt/sources.list \ && apt-get update \ - && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION} \ + && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION}=${LLVM_APT_VERSION} \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* @@ -72,10 +72,6 @@ RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld # https://salsa.debian.org/pkg-llvm-team/llvm-toolchain/-/commit/992e52c0b156a5ba9c6a8a54f8c4857ddd3d371d RUN sed -i '/_IMPORT_CHECK_FILES_FOR_\(mlir-\|llvm-bolt\|merge-fdata\|MLIR\)/ {s|^|#|}' /usr/lib/llvm-${LLVM_VERSION}/lib/cmake/llvm/LLVMExports-*.cmake -# LLVM changes paths for compiler-rt libraries. For some reason clang-18.1.8 cannot catch up libraries from default install path. -# It's very dirty workaround, better to build compiler and LLVM ourself and use it. Details: https://github.com/llvm/llvm-project/issues/95792 -RUN test ! -d /usr/lib/llvm-18/lib/clang/18/lib/x86_64-pc-linux-gnu || ln -s /usr/lib/llvm-18/lib/clang/18/lib/x86_64-pc-linux-gnu /usr/lib/llvm-18/lib/clang/18/lib/x86_64-unknown-linux-gnu - ARG TARGETARCH ARG SCCACHE_VERSION=v0.7.7 ENV SCCACHE_IGNORE_SERVER_IO_ERROR=1 diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile index 703ab54c242..318f437a108 100644 --- a/docker/test/fasttest/Dockerfile +++ b/docker/test/fasttest/Dockerfile @@ -40,10 +40,6 @@ RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld # https://salsa.debian.org/pkg-llvm-team/llvm-toolchain/-/commit/992e52c0b156a5ba9c6a8a54f8c4857ddd3d371d RUN sed -i '/_IMPORT_CHECK_FILES_FOR_\(mlir-\|llvm-bolt\|merge-fdata\|MLIR\)/ {s|^|#|}' /usr/lib/llvm-${LLVM_VERSION}/lib/cmake/llvm/LLVMExports-*.cmake -# LLVM changes paths for compiler-rt libraries. For some reason clang-18.1.8 cannot catch up libraries from default install path. -# It's very dirty workaround, better to build compiler and LLVM ourself and use it. Details: https://github.com/llvm/llvm-project/issues/95792 -RUN test ! 
-d /usr/lib/llvm-19/lib/clang/19/lib/x86_64-pc-linux-gnu || ln -s /usr/lib/llvm-19/lib/clang/19/lib/x86_64-pc-linux-gnu /usr/lib/llvm-19/lib/clang/19/lib/x86_64-unknown-linux-gnu - ARG CCACHE_VERSION=4.6.1 RUN mkdir /tmp/ccache \ && cd /tmp/ccache \ diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index fe762060710..6fa54848204 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -5,7 +5,7 @@ FROM ubuntu:22.04 ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 LLVM_APT_VERSION="1:19.1.4~++20241119083634+aadaa00de76e-1~exp1~20241119083649.64" RUN apt-get update \ && apt-get install \ @@ -28,7 +28,7 @@ RUN apt-get update \ && echo "deb https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \ /etc/apt/sources.list \ && apt-get update \ - && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION} \ + && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION}=${LLVM_APT_VERSION} \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* From 0aeb16a8c1a4c9994074b9bb5e0a5f5a83b780be Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 20 Nov 2024 17:28:40 +0100 Subject: [PATCH 21/31] Fix LLVM APT version better --- ci/docker/fasttest/Dockerfile | 3 ++- docker/test/util/Dockerfile | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ci/docker/fasttest/Dockerfile b/ci/docker/fasttest/Dockerfile index 398229c0f88..749d6a0034d 100644 --- a/ci/docker/fasttest/Dockerfile +++ b/ci/docker/fasttest/Dockerfile @@ -4,8 +4,9 @@ FROM ubuntu:22.04 # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list +ARG LLVM_APT_VERSION="1:19.1.4~++20241119083512+aadaa00de76e-1~exp1~20241119083527.63" -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 LLVM_APT_VERSION="1:19.1.4~++20241119083634+aadaa00de76e-1~exp1~20241119083649.64" +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 RUN apt-get update \ && apt-get install \ diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 6fa54848204..87340ee415d 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -4,8 +4,9 @@ FROM ubuntu:22.04 # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list +ARG LLVM_APT_VERSION="1:19.1.4~++20241119083512+aadaa00de76e-1~exp1~20241119083527.63" -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 LLVM_APT_VERSION="1:19.1.4~++20241119083634+aadaa00de76e-1~exp1~20241119083649.64" +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 RUN apt-get update \ && apt-get install \ From 9cc9733e728d0d9094d3bf8ecc3ed4b91437258e Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 20 Nov 2024 17:38:45 +0100 Subject: [PATCH 22/31] Use a glob in LLVM APT version --- ci/docker/fasttest/Dockerfile | 2 +- docker/test/util/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/docker/fasttest/Dockerfile b/ci/docker/fasttest/Dockerfile index 749d6a0034d..5e3fc562219 100644 --- a/ci/docker/fasttest/Dockerfile +++ b/ci/docker/fasttest/Dockerfile @@ -4,7 +4,7 @@ FROM ubuntu:22.04 # ARG for quick switch to 
a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -ARG LLVM_APT_VERSION="1:19.1.4~++20241119083512+aadaa00de76e-1~exp1~20241119083527.63" +ARG LLVM_APT_VERSION="1:19.1.4~*" ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 87340ee415d..9617c178593 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -4,7 +4,7 @@ FROM ubuntu:22.04 # ARG for quick switch to a given ubuntu mirror ARG apt_archive="http://archive.ubuntu.com" RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -ARG LLVM_APT_VERSION="1:19.1.4~++20241119083512+aadaa00de76e-1~exp1~20241119083527.63" +ARG LLVM_APT_VERSION="1:19.1.4~*" ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=19 From 447fb34a59b389da6be3c46cfd21127bb5e8c314 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 20 Nov 2024 18:04:18 +0100 Subject: [PATCH 23/31] Use equal-or-higher in LLVM APT version --- ci/docker/fasttest/Dockerfile | 2 +- docker/test/util/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/docker/fasttest/Dockerfile b/ci/docker/fasttest/Dockerfile index 5e3fc562219..41aeb38619c 100644 --- a/ci/docker/fasttest/Dockerfile +++ b/ci/docker/fasttest/Dockerfile @@ -27,7 +27,7 @@ RUN apt-get update \ && echo "deb https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \ /etc/apt/sources.list \ && apt-get update \ - && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION}=${LLVM_APT_VERSION} \ + && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION}>=${LLVM_APT_VERSION} \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* diff --git a/docker/test/util/Dockerfile b/docker/test/util/Dockerfile index 9617c178593..75ca3448b2a 100644 --- a/docker/test/util/Dockerfile +++ b/docker/test/util/Dockerfile @@ -29,7 +29,7 @@ RUN apt-get update \ && echo "deb https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \ /etc/apt/sources.list \ && apt-get update \ - && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION}=${LLVM_APT_VERSION} \ + && apt-get install --yes --no-install-recommends --verbose-versions llvm-${LLVM_VERSION}>=${LLVM_APT_VERSION} \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/* From f37989f3d88d4bf11d991f72f19a8f1f4dd2ea59 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Wed, 20 Nov 2024 23:09:34 +0100 Subject: [PATCH 24/31] Fix tidy --- src/Processors/tests/gtest_write_parquet_page_index.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Processors/tests/gtest_write_parquet_page_index.cpp b/src/Processors/tests/gtest_write_parquet_page_index.cpp index 591b794a53f..630499ca874 100644 --- a/src/Processors/tests/gtest_write_parquet_page_index.cpp +++ b/src/Processors/tests/gtest_write_parquet_page_index.cpp @@ -217,6 +217,7 @@ TEST(Parquet, WriteParquetPageIndexParrelelPlainEnconding) std::vector> values; std::vector col; + col.reserve(100000); for (size_t i = 0; i < 100000; i++) { col.push_back(std::to_string(i)); @@ -293,6 +294,7 @@ TEST(Parquet, WriteParquetPageIndexSingleThread) std::vector> values; std::vector col; + col.reserve(1000); for (size_t i = 0; i < 1000; i++) { col.push_back(i % 10); From 9fc4f11c09593ab47985aa61803fb9c4ccae8cdc Mon Sep 17 00:00:00 
2001 From: Konstantin Bogdanov Date: Thu, 21 Nov 2024 10:53:08 +0100 Subject: [PATCH 25/31] Poke CI From bd6f644b50dcd2d2c88b0f2234be8d21fc6dd751 Mon Sep 17 00:00:00 2001 From: Konstantin Bogdanov Date: Thu, 21 Nov 2024 12:23:12 +0100 Subject: [PATCH 26/31] Poke CI From 05076afc525cfc9e14d51318760d8c0038790120 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Mar=C3=ADn?= Date: Thu, 21 Nov 2024 12:47:03 +0100 Subject: [PATCH 27/31] Ignore throwIf in 00002_log_and_exception_messages_formatting --- .../00002_log_and_exception_messages_formatting.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/queries/0_stateless/00002_log_and_exception_messages_formatting.sql b/tests/queries/0_stateless/00002_log_and_exception_messages_formatting.sql index 917789aec10..20e2cf57371 100644 --- a/tests/queries/0_stateless/00002_log_and_exception_messages_formatting.sql +++ b/tests/queries/0_stateless/00002_log_and_exception_messages_formatting.sql @@ -62,13 +62,13 @@ SELECT WHERE length(message_format_string) = 0 AND (message like '%DB::Exception%' or message like '%Coordination::Exception%') - AND message not like '% Received from %' and message not like '%(SYNTAX_ERROR)%' and message not like '%Fault injection%' + AND message not like '% Received from %' and message not like '%(SYNTAX_ERROR)%' and message not like '%Fault injection%' and message not like '%throwIf%' GROUP BY message ORDER BY c LIMIT 10 )) FROM logs WHERE (message like '%DB::Exception%' or message like '%Coordination::Exception%') - AND message not like '% Received from %' and message not like '%(SYNTAX_ERROR)%' and message not like '%Fault injection%'; + AND message not like '% Received from %' and message not like '%(SYNTAX_ERROR)%' and message not like '%Fault injection%' and message not like '%throwIf%'; -- FIXME some of the following messages are not informative and it has to be fixed From 9c542799d83116eb620bdc283fb4efd283e80fa4 Mon Sep 17 00:00:00 2001 From: Max Kainov Date: Fri, 15 Nov 2024 12:32:48 +0100 Subject: [PATCH 28/31] CI: Enable Stress Tests with praktika --- .github/workflows/pr.yaml | 701 --------------------------- ci/jobs/functional_stateful_tests.py | 4 +- ci/praktika/json.html | 2 +- ci/praktika/native_jobs.py | 6 +- ci/workflows/pull_request.py | 54 ++- packages/build | 2 +- tests/ci/report.py | 8 +- tests/ci/stress_check.py | 3 +- 8 files changed, 51 insertions(+), 729 deletions(-) delete mode 100644 .github/workflows/pr.yaml diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml deleted file mode 100644 index 925745b9dc0..00000000000 --- a/.github/workflows/pr.yaml +++ /dev/null @@ -1,701 +0,0 @@ -# generated by praktika - -name: PR - -on: - pull_request: - branches: ['master'] - -# Cancel the previous wf run in PRs. -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - GH_TOKEN: ${{ github.token }} - -# Allow updating GH commit statuses and PR comments to post an actual job reports link -permissions: write-all - -jobs: - - config_workflow: - runs-on: [ci_services] - needs: [] - name: "Config Workflow" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. 
- - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - docker_builds: - runs-on: [ci_services_ebs] - needs: [config_workflow] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIEJ1aWxkcw==') }} - name: "Docker Builds" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - style_check: - runs-on: [ci_services] - needs: [config_workflow, docker_builds] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3R5bGUgQ2hlY2s=') }} - name: "Style Check" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . 
/tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - fast_test: - runs-on: [builder] - needs: [config_workflow, docker_builds] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RmFzdCB0ZXN0') }} - name: "Fast test" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - build_amd_debug: - runs-on: [builder] - needs: [config_workflow, docker_builds] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }} - name: "Build (amd_debug)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Build (amd_debug)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Build (amd_debug)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - build_amd_release: - runs-on: [builder] - needs: [config_workflow, docker_builds] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }} - name: "Build (amd_release)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. 
- - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Build (amd_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Build (amd_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - build_arm_release: - runs-on: [builder-aarch64] - needs: [config_workflow, docker_builds] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }} - name: "Build (arm_release)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Build (arm_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Build (arm_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - build_arm_asan: - runs-on: [builder-aarch64] - needs: [config_workflow, docker_builds] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9hc2FuKQ==') }} - name: "Build (arm_asan)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . 
/tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Build (arm_asan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Build (arm_asan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stateless_tests_amd_debugparallel: - runs-on: [builder] - needs: [config_workflow, docker_builds, build_amd_debug] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcscGFyYWxsZWwp') }} - name: "Stateless tests (amd_debug,parallel)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stateless_tests_amd_debugnon_parallel: - runs-on: [func-tester] - needs: [config_workflow, docker_builds, build_amd_debug] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsbm9uLXBhcmFsbGVsKQ==') }} - name: "Stateless tests (amd_debug,non-parallel)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . 
/tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stateless_tests_amd_releaseparallel: - runs-on: [builder] - needs: [config_workflow, docker_builds, build_amd_release] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxwYXJhbGxlbCk=') }} - name: "Stateless tests (amd_release,parallel)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stateless_tests_amd_releasenon_parallel: - runs-on: [func-tester] - needs: [config_workflow, docker_builds, build_amd_release] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxub24tcGFyYWxsZWwp') }} - name: "Stateless tests (amd_release,non-parallel)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . 
/tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stateless_tests_arm_asanparallel: - runs-on: [builder-aarch64] - needs: [config_workflow, docker_builds, build_arm_asan] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixwYXJhbGxlbCk=') }} - name: "Stateless tests (arm_asan,parallel)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stateless_tests_arm_asannon_parallel: - runs-on: [func-tester-aarch64] - needs: [config_workflow, docker_builds, build_arm_asan] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixub24tcGFyYWxsZWwp') }} - name: "Stateless tests (arm_asan,non-parallel)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . 
/tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stateful_tests_amd_releaseparallel: - runs-on: [builder] - needs: [config_workflow, docker_builds, build_amd_debug] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVmdWwgdGVzdHMgKGFtZF9yZWxlYXNlLHBhcmFsbGVsKQ==') }} - name: "Stateful tests (amd_release,parallel)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stateful tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stateful tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - stress_tests_arm_release: - runs-on: [func-tester-aarch64] - needs: [config_workflow, docker_builds, build_arm_release] - if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3RzIChhcm1fcmVsZWFzZSk=') }} - name: "Stress tests (arm_release)" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . 
/tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Stress tests (arm_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Stress tests (arm_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi - - finish_workflow: - runs-on: [ci_services] - needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, build_arm_release, build_arm_asan, stateless_tests_amd_debugparallel, stateless_tests_amd_debugnon_parallel, stateless_tests_amd_releaseparallel, stateless_tests_amd_releasenon_parallel, stateless_tests_arm_asanparallel, stateless_tests_arm_asannon_parallel, stateful_tests_amd_releaseparallel, stress_tests_arm_release] - if: ${{ !cancelled() }} - name: "Finish Workflow" - outputs: - data: ${{ steps.run.outputs.DATA }} - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - ref: ${{ github.head_ref }} - - - name: Prepare env script - run: | - cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' - export PYTHONPATH=./ci:. - - cat > /tmp/praktika/workflow_config_pr.json << 'EOF' - ${{ needs.config_workflow.outputs.data }} - EOF - cat > /tmp/praktika/workflow_status.json << 'EOF' - ${{ toJson(needs) }} - EOF - ENV_SETUP_SCRIPT_EOF - - rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika - mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output - - - name: Run - id: run - run: | - . /tmp/praktika_setup_env.sh - set -o pipefail - if command -v ts &> /dev/null; then - python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log - else - python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log - fi diff --git a/ci/jobs/functional_stateful_tests.py b/ci/jobs/functional_stateful_tests.py index f78e158037f..4af1ab902ae 100644 --- a/ci/jobs/functional_stateful_tests.py +++ b/ci/jobs/functional_stateful_tests.py @@ -131,7 +131,9 @@ def main(): res = res and CH.start() res = res and CH.wait_ready() # TODO: Use --database-replicated optionally - res = res and Shell.check(f"./ci/jobs/scripts/functional_tests/setup_ch_cluster.sh") + res = res and Shell.check( + f"./ci/jobs/scripts/functional_tests/setup_ch_cluster.sh" + ) if res: print("ch started") logs_to_attach += [ diff --git a/ci/praktika/json.html b/ci/praktika/json.html index b11106719cd..717e6d39ac3 100644 --- a/ci/praktika/json.html +++ b/ci/praktika/json.html @@ -601,7 +601,7 @@ td.classList.add('time-column'); td.textContent = value ? formatDuration(value) : ''; } else if (column === 'info') { - td.textContent = value.includes('\n') ? '↵' : (value || ''); + td.textContent = value && value.includes('\n') ? 
'↵' : (value || ''); td.classList.add('info-column'); } diff --git a/ci/praktika/native_jobs.py b/ci/praktika/native_jobs.py index ce42b6a33fc..58cd440283a 100644 --- a/ci/praktika/native_jobs.py +++ b/ci/praktika/native_jobs.py @@ -310,7 +310,7 @@ def _finish_workflow(workflow, job_name): print(env.get_needs_statuses()) print("Check Workflow results") - _ResultS3.copy_result_from_s3( + version = _ResultS3.copy_result_from_s3_with_version( Result.file_name_static(workflow.name), ) workflow_result = Result.from_fs(workflow.name) @@ -358,9 +358,7 @@ def _finish_workflow(workflow, job_name): env.add_info(ResultInfo.GH_STATUS_ERROR) if update_final_report: - _ResultS3.copy_result_to_s3( - workflow_result, - ) + _ResultS3.copy_result_to_s3_with_version(workflow_result, version + 1) Result.from_fs(job_name).set_status(Result.Status.SUCCESS) diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py index 541d530b6c5..1d1bcca9fa7 100644 --- a/ci/workflows/pull_request.py +++ b/ci/workflows/pull_request.py @@ -73,10 +73,26 @@ build_jobs = Job.Config( ).parametrize( parameter=["amd_debug", "amd_release", "arm_release", "arm_asan"], provides=[ - [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.DEB_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG], - [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.DEB_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE], - [ArtifactNames.CH_ARM_RELEASE, ArtifactNames.DEB_ARM_RELEASE, ArtifactNames.CH_ODBC_B_ARM_RELEASE], - [ArtifactNames.CH_ARM_ASAN, ArtifactNames.DEB_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN], + [ + ArtifactNames.CH_AMD_DEBUG, + ArtifactNames.DEB_AMD_DEBUG, + ArtifactNames.CH_ODBC_B_AMD_DEBUG, + ], + [ + ArtifactNames.CH_AMD_RELEASE, + ArtifactNames.DEB_AMD_RELEASE, + ArtifactNames.CH_ODBC_B_AMD_RELEASE, + ], + [ + ArtifactNames.CH_ARM_RELEASE, + ArtifactNames.DEB_ARM_RELEASE, + ArtifactNames.CH_ODBC_B_ARM_RELEASE, + ], + [ + ArtifactNames.CH_ARM_ASAN, + ArtifactNames.DEB_ARM_ASAN, + ArtifactNames.CH_ODBC_B_ARM_ASAN, + ], ], runs_on=[ [RunnerLabels.BUILDER_AMD], @@ -188,24 +204,26 @@ workflow = Workflow.Config( name="...", type=Artifact.Type.S3, path=f"{Settings.TEMP_DIR}/build/programs/clickhouse", - ).parametrize(names=[ - ArtifactNames.CH_AMD_DEBUG, - ArtifactNames.CH_AMD_RELEASE, - ArtifactNames.CH_ARM_RELEASE, - ArtifactNames.CH_ARM_ASAN, - ]), - + ).parametrize( + names=[ + ArtifactNames.CH_AMD_DEBUG, + ArtifactNames.CH_AMD_RELEASE, + ArtifactNames.CH_ARM_RELEASE, + ArtifactNames.CH_ARM_ASAN, + ] + ), *Artifact.Config( name="...", type=Artifact.Type.S3, path=f"{Settings.TEMP_DIR}/build/programs/clickhouse-odbc-bridge", - ).parametrize(names=[ - ArtifactNames.CH_ODBC_B_AMD_DEBUG, - ArtifactNames.CH_ODBC_B_AMD_RELEASE, - ArtifactNames.CH_ODBC_B_ARM_RELEASE, - ArtifactNames.CH_ODBC_B_ARM_ASAN, - ]), - + ).parametrize( + names=[ + ArtifactNames.CH_ODBC_B_AMD_DEBUG, + ArtifactNames.CH_ODBC_B_AMD_RELEASE, + ArtifactNames.CH_ODBC_B_ARM_RELEASE, + ArtifactNames.CH_ODBC_B_ARM_ASAN, + ] + ), Artifact.Config( name=ArtifactNames.DEB_AMD_DEBUG, type=Artifact.Type.S3, diff --git a/packages/build b/packages/build index 17ea979622c..3c46c10948c 100755 --- a/packages/build +++ b/packages/build @@ -134,7 +134,7 @@ for config in clickhouse*.yaml; do fi if [ -n "$MAKE_DEB" ] || [ -n "$MAKE_TGZ" ]; then echo "Building deb package for $config" - nfpm package --target "$OUTPUT_DIR" --config "$config" --packager deb + PKG_PATH=$(nfpm package --target "$OUTPUT_DIR" --config "$config" --packager deb | tee /dev/stderr | grep "created package:" | sed 's/.*created package: 
//') fi if [ -n "$MAKE_APK" ]; then diff --git a/tests/ci/report.py b/tests/ci/report.py index 3e4f2ff2522..5f9184cb87a 100644 --- a/tests/ci/report.py +++ b/tests/ci/report.py @@ -422,7 +422,9 @@ class JobReport: current_dir = os.path.dirname(os.path.abspath(__file__)) if current_dir in sys.path: sys.path.remove(current_dir) - from praktika.result import Result + from praktika.result import ( # pylint: disable=import-error,import-outside-toplevel + Result, + ) if self.start_time: dt = datetime.datetime.strptime(self.start_time, "%Y-%m-%d %H:%M:%S") @@ -448,7 +450,9 @@ class JobReport: start_time=timestamp, duration=self.duration, results=sub_results, - files=[f for f in self.additional_files], + files=( + [str(f) for f in self.additional_files] if self.additional_files else [] + ), ) @staticmethod diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py index c1d887ee536..b9b4b6886fb 100644 --- a/tests/ci/stress_check.py +++ b/tests/ci/stress_check.py @@ -9,13 +9,14 @@ import sys from pathlib import Path from typing import List, Tuple +from praktika.utils import Shell # pylint: disable=import-error + from build_download_helper import download_all_deb_packages from clickhouse_helper import CiLogsCredentials from docker_images_helper import DockerImage, get_docker_image, pull_image from env_helper import REPO_COPY, REPORT_PATH, TEMP_PATH from get_robot_token import get_parameter_from_ssm from pr_info import PRInfo -from praktika.utils import Shell from report import ERROR, JobReport, TestResults, read_test_results from stopwatch import Stopwatch from tee_popen import TeePopen From 1cc687f3b1de197c322964d7a33036fe792a3ddb Mon Sep 17 00:00:00 2001 From: Denny Crane Date: Thu, 21 Nov 2024 09:54:57 -0400 Subject: [PATCH 29/31] Update json-functions.md --- docs/en/sql-reference/functions/json-functions.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/en/sql-reference/functions/json-functions.md b/docs/en/sql-reference/functions/json-functions.md index 26fe888ab49..b554c707561 100644 --- a/docs/en/sql-reference/functions/json-functions.md +++ b/docs/en/sql-reference/functions/json-functions.md @@ -488,7 +488,7 @@ JSONType(json [, indices_or_keys]...) **Returned value** -- Returns the type of a JSON value as a string, otherwise if the value doesn't exists it returns `Null`. [String](../data-types/string.md). +- Returns the type of a JSON value as a string, otherwise if the value doesn't exists it returns `Null`. [Enum](../data-types/enum.md). **Examples** @@ -520,7 +520,7 @@ JSONExtractUInt(json [, indices_or_keys]...) **Returned value** -- Returns a UInt value if it exists, otherwise it returns `Null`. [UInt64](../data-types/string.md). +- Returns a UInt value if it exists, otherwise it returns `0`. [UInt64](../data-types/int-uint.md). **Examples** @@ -560,7 +560,7 @@ JSONExtractInt(json [, indices_or_keys]...) **Returned value** -- Returns an Int value if it exists, otherwise it returns `Null`. [Int64](../data-types/int-uint.md). +- Returns an Int value if it exists, otherwise it returns `0`. [Int64](../data-types/int-uint.md). **Examples** @@ -600,7 +600,7 @@ JSONExtractFloat(json [, indices_or_keys]...) **Returned value** -- Returns an Float value if it exists, otherwise it returns `Null`. [Float64](../data-types/float.md). +- Returns an Float value if it exists, otherwise it returns `0`. [Float64](../data-types/float.md). 
 **Examples**

From c8c443f127655eaf93e1866087975b7018ef6f81 Mon Sep 17 00:00:00 2001
From: Denny Crane
Date: Thu, 21 Nov 2024 10:03:38 -0400
Subject: [PATCH 30/31] Update json-functions.md

---
 docs/en/sql-reference/functions/json-functions.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/en/sql-reference/functions/json-functions.md b/docs/en/sql-reference/functions/json-functions.md
index b554c707561..a92c26461e9 100644
--- a/docs/en/sql-reference/functions/json-functions.md
+++ b/docs/en/sql-reference/functions/json-functions.md
@@ -468,7 +468,7 @@ SELECT JSONLength('{"a": "hello", "b": [-100, 200.0, 300]}') = 2
 ### JSONType
-Return the type of a JSON value. If the value does not exist, `Null` will be returned.
+Return the type of a JSON value. If the value does not exist, `Null` will be returned (not usual [Null](../data-types/nullable.md), but a special Null=0 of `Enum8('Null' = 0, 'String' = 34,...`). .
 **Syntax**
@@ -488,7 +488,7 @@ JSONType(json [, indices_or_keys]...)
 **Returned value**
-- Returns the type of a JSON value as a string, otherwise if the value doesn't exists it returns `Null`. [Enum](../data-types/enum.md).
+- Returns the type of a JSON value as a string, otherwise if the value doesn't exist it returns `Null=0`. [Enum](../data-types/enum.md).
 **Examples**

From 7b34b0eec5572abbc7e29dfbaacd1ac9b51f1f52 Mon Sep 17 00:00:00 2001
From: Denny Crane
Date: Thu, 21 Nov 2024 10:10:44 -0400
Subject: [PATCH 31/31] Update json-functions.md

---
 docs/en/sql-reference/functions/json-functions.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/en/sql-reference/functions/json-functions.md b/docs/en/sql-reference/functions/json-functions.md
index a92c26461e9..286dde68f45 100644
--- a/docs/en/sql-reference/functions/json-functions.md
+++ b/docs/en/sql-reference/functions/json-functions.md
@@ -468,7 +468,7 @@ SELECT JSONLength('{"a": "hello", "b": [-100, 200.0, 300]}') = 2
 ### JSONType
-Return the type of a JSON value. If the value does not exist, `Null` will be returned (not usual [Null](../data-types/nullable.md), but a special Null=0 of `Enum8('Null' = 0, 'String' = 34,...`). .
+Return the type of a JSON value. If the value does not exist, `Null=0` will be returned (not the usual [Null](../data-types/nullable.md), but the `Null=0` value of `Enum8('Null' = 0, 'String' = 34, ...)`).
 **Syntax**
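A minimal sketch of the defaults described in the doc patches above for keys that do not exist — an illustration only, not part of the patch series, and the exact result rendering may vary between ClickHouse versions:

```sql
-- Illustrative only: values returned when the requested key is missing.
SELECT
    JSONType('{"a": 1}', 'b')          AS missing_type,   -- 'Null' (the Enum8 value 0)
    JSONExtractUInt('{"a": 1}', 'b')   AS missing_uint,   -- 0
    JSONExtractInt('{"a": 1}', 'b')    AS missing_int,    -- 0
    JSONExtractFloat('{"a": 1}', 'b')  AS missing_float;  -- 0
```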