CI: Unit tests with praktika

Max Kainov 2024-11-21 22:10:27 +01:00
parent 26d2ac7631
commit 70c983835d
25 changed files with 1975 additions and 645 deletions

.github/workflows/pr.yaml

@ -0,0 +1,901 @@
# generated by praktika
name: PR
on:
pull_request:
branches: ['master']
# Cancel the previous wf run in PRs.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
GH_TOKEN: ${{ github.token }}
# Allow updating GH commit statuses and PR comments to post an actual job reports link
permissions: write-all
jobs:
config_workflow:
runs-on: [ci_services]
needs: []
name: "Config Workflow"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Config Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Config Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
docker_builds:
runs-on: [ci_services_ebs]
needs: [config_workflow]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIEJ1aWxkcw==') }}
name: "Docker Builds"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Docker Builds''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Docker Builds''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
style_check:
runs-on: [ci_services]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3R5bGUgQ2hlY2s=') }}
name: "Style Check"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Style Check''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Style Check''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
fast_test:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RmFzdCB0ZXN0') }}
name: "Fast test"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Fast test''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Fast test''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_debug:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }}
name: "Build (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (amd_debug)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (amd_debug)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_release:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }}
name: "Build (amd_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (amd_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (amd_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_asan:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9hc2FuKQ==') }}
name: "Build (amd_asan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (amd_asan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (amd_asan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_tsan:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF90c2FuKQ==') }}
name: "Build (amd_tsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (amd_tsan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (amd_tsan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_msan:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9tc2FuKQ==') }}
name: "Build (amd_msan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (amd_msan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (amd_msan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_ubsan:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF91YnNhbik=') }}
name: "Build (amd_ubsan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (amd_ubsan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (amd_ubsan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_binary:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9iaW5hcnkp') }}
name: "Build (amd_binary)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (amd_binary)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (amd_binary)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_arm_release:
runs-on: [builder-aarch64]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }}
name: "Build (arm_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (arm_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (arm_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_arm_asan:
runs-on: [builder-aarch64]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9hc2FuKQ==') }}
name: "Build (arm_asan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Build (arm_asan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Build (arm_asan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_debugparallel:
runs-on: [builder]
needs: [config_workflow, docker_builds, build_amd_debug]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcscGFyYWxsZWwp') }}
name: "Stateless tests (amd_debug,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_debugnon_parallel:
runs-on: [func-tester]
needs: [config_workflow, docker_builds, build_amd_debug]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsbm9uLXBhcmFsbGVsKQ==') }}
name: "Stateless tests (amd_debug,non-parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_releaseparallel:
runs-on: [builder]
needs: [config_workflow, docker_builds, build_amd_release]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxwYXJhbGxlbCk=') }}
name: "Stateless tests (amd_release,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_releasenon_parallel:
runs-on: [func-tester]
needs: [config_workflow, docker_builds, build_amd_release]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxub24tcGFyYWxsZWwp') }}
name: "Stateless tests (amd_release,non-parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_arm_asanparallel:
runs-on: [builder-aarch64]
needs: [config_workflow, docker_builds, build_arm_asan]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixwYXJhbGxlbCk=') }}
name: "Stateless tests (arm_asan,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_arm_asannon_parallel:
runs-on: [func-tester-aarch64]
needs: [config_workflow, docker_builds, build_arm_asan]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixub24tcGFyYWxsZWwp') }}
name: "Stateless tests (arm_asan,non-parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateful_tests_amd_releaseparallel:
runs-on: [builder]
needs: [config_workflow, docker_builds, build_amd_debug]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVmdWwgdGVzdHMgKGFtZF9yZWxlYXNlLHBhcmFsbGVsKQ==') }}
name: "Stateful tests (amd_release,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stateful tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stateful tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stress_tests_arm_release:
runs-on: [func-tester-aarch64]
needs: [config_workflow, docker_builds, build_arm_release]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RyZXNzIHRlc3RzIChhcm1fcmVsZWFzZSk=') }}
name: "Stress tests (arm_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Stress tests (arm_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Stress tests (arm_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
finish_workflow:
runs-on: [ci_services]
needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, build_amd_asan, build_amd_tsan, build_amd_msan, build_amd_ubsan, build_amd_binary, build_arm_release, build_arm_asan, stateless_tests_amd_debugparallel, stateless_tests_amd_debugnon_parallel, stateless_tests_amd_releaseparallel, stateless_tests_amd_releasenon_parallel, stateless_tests_arm_asanparallel, stateless_tests_arm_asannon_parallel, stateful_tests_amd_releaseparallel, stress_tests_arm_release]
if: ${{ !cancelled() }}
name: "Finish Workflow"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run '''Finish Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run '''Finish Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
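
Note on the skip conditions above: praktika records already-satisfied jobs in the workflow config's cache_success_base64 field, and each generated `if:` expression simply tests for the base64-encoded job name. A minimal sketch of that correspondence (plain standard-library base64; the helper name here is ours, not praktika's):

import base64

def cache_token(job_name: str) -> str:
    # Encode a job name the way the generated `contains(...)` conditions expect.
    return base64.b64encode(job_name.encode("utf-8")).decode("ascii")

# e.g. docker_builds above checks for 'RG9ja2VyIEJ1aWxkcw==',
# and style_check checks for 'U3R5bGUgQ2hlY2s=':
assert cache_token("Docker Builds") == "RG9ja2VyIEJ1aWxkcw=="
assert cache_token("Style Check") == "U3R5bGUgQ2hlY2s="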


@ -0,0 +1,80 @@
# docker build -t clickhouse/binary-builder .
ARG FROM_TAG=latest
FROM clickhouse/fasttest:$FROM_TAG
ENV CC=clang-${LLVM_VERSION}
ENV CXX=clang++-${LLVM_VERSION}
# If the cctools is updated, then first build it in the CI, then update here in a different commit
COPY --from=clickhouse/cctools:d9e3596e706b /cctools /cctools
# Rust toolchain and libraries
ENV RUSTUP_HOME=/rust/rustup
ENV CARGO_HOME=/rust/cargo
ENV PATH="/rust/cargo/bin:${PATH}"
RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \
chmod 777 -R /rust && \
rustup toolchain install nightly-2024-04-01 && \
rustup default nightly-2024-04-01 && \
rustup toolchain remove stable && \
rustup component add rust-src && \
rustup target add x86_64-unknown-linux-gnu && \
rustup target add aarch64-unknown-linux-gnu && \
rustup target add x86_64-apple-darwin && \
rustup target add x86_64-unknown-freebsd && \
rustup target add aarch64-apple-darwin && \
rustup target add powerpc64le-unknown-linux-gnu && \
rustup target add x86_64-unknown-linux-musl && \
rustup target add aarch64-unknown-linux-musl && \
rustup target add riscv64gc-unknown-linux-gnu
# A cross-linker for RISC-V 64 (we need it, because LLVM's LLD does not work):
RUN apt-get update \
&& apt-get install software-properties-common --yes --no-install-recommends --verbose-versions
RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
&& apt-get update \
&& apt-get install --yes \
binutils-riscv64-linux-gnu \
build-essential \
python3-boto3 \
yasm \
zstd \
zip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/*
# Download toolchain and SDK for Darwin
RUN curl -sL -O https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.0.sdk.tar.xz
# Download and install mold 2.0 for s390x build
RUN curl -Lo /tmp/mold.tar.gz "https://github.com/rui314/mold/releases/download/v2.0.0/mold-2.0.0-x86_64-linux.tar.gz" \
&& mkdir /tmp/mold \
&& tar -xzf /tmp/mold.tar.gz -C /tmp/mold \
&& cp -r /tmp/mold/mold*/* /usr \
&& rm -rf /tmp/mold \
&& rm /tmp/mold.tar.gz
# Architecture of the image when BuildKit/buildx is used
ARG TARGETARCH
ARG NFPM_VERSION=2.20.0
RUN arch=${TARGETARCH:-amd64} \
&& curl -Lo /tmp/nfpm.deb "https://github.com/goreleaser/nfpm/releases/download/v${NFPM_VERSION}/nfpm_${arch}.deb" \
&& dpkg -i /tmp/nfpm.deb \
&& rm /tmp/nfpm.deb
ARG GO_VERSION=1.19.10
# We needed go for clickhouse-diagnostics (it is not used anymore)
RUN arch=${TARGETARCH:-amd64} \
&& curl -Lo /tmp/go.tgz "https://go.dev/dl/go${GO_VERSION}.linux-${arch}.tar.gz" \
&& tar -xzf /tmp/go.tgz -C /usr/local/ \
&& rm /tmp/go.tgz
ENV PATH="$PATH:/usr/local/go/bin"
ENV GOPATH=/workdir/go
ENV GOCACHE=/workdir/
ARG CLANG_TIDY_SHA1=c191254ea00d47ade11d7170ef82fe038c213774
RUN curl -Lo /usr/bin/clang-tidy-cache \
"https://raw.githubusercontent.com/matus-chochlik/ctcache/$CLANG_TIDY_SHA1/clang-tidy-cache" \
&& chmod +x /usr/bin/clang-tidy-cache


@ -11,7 +11,8 @@ ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/down
 RUN mkdir /etc/clickhouse-server /etc/clickhouse-keeper /etc/clickhouse-client && chmod 777 /etc/clickhouse-* \
     && mkdir -p /var/lib/clickhouse /var/log/clickhouse-server && chmod 777 /var/log/clickhouse-server /var/lib/clickhouse

-RUN addgroup --gid 1001 clickhouse && adduser --uid 1001 --gid 1001 --disabled-password clickhouse
+RUN addgroup --gid 1000 clickhouse && adduser --uid 1000 --gid 1000 --disabled-password clickhouse
+RUN addgroup --gid 1001 clickhouse2 && adduser --uid 1001 --gid 1001 --disabled-password clickhouse2

 # moreutils - provides ts fo FT
 # expect, bzip2 - requried by FT


@ -6,6 +6,7 @@ RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
     libxml2-utils \
     python3-pip \
     locales \
+    ripgrep \
     git \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* /var/cache/debconf /tmp/*


@ -5,6 +5,7 @@ from praktika.settings import Settings
 from praktika.utils import MetaClasses, Shell, Utils

 from ci.jobs.scripts.clickhouse_version import CHVersion
+from ci.workflows.defs import CIFiles, ToolSet


 class JobStages(metaclass=MetaClasses.WithIter):
@ -13,6 +14,7 @@ class JobStages(metaclass=MetaClasses.WithIter):
     UNSHALLOW = "unshallow"
     BUILD = "build"
     PACKAGE = "package"
+    UNIT = "unit"


 def parse_args():
@ -36,12 +38,15 @@ CMAKE_CMD = """cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA \
 -DENABLE_UTILS=0 -DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=ON -DCMAKE_INSTALL_PREFIX=/usr \
 -DCMAKE_INSTALL_SYSCONFDIR=/etc -DCMAKE_INSTALL_LOCALSTATEDIR=/var -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON \
 {AUX_DEFS} \
--DCMAKE_C_COMPILER=clang-18 -DCMAKE_CXX_COMPILER=clang++-18 \
+-DCMAKE_C_COMPILER={COMPILER} -DCMAKE_CXX_COMPILER={COMPILER_CPP} \
 -DCOMPILER_CACHE={CACHE_TYPE} -DENABLE_BUILD_PROFILING=1 {DIR}"""

+# release: cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_BUILD_TYPE=None -DSANITIZE= -DENABLE_CHECK_HEAVY_BUILDS=1 -DENABLE_CLICKHOUSE_SELF_EXTRACTING=1 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=ON -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_INSTALL_SYSCONFDIR=/etc -DCMAKE_INSTALL_LOCALSTATEDIR=/var -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON -DSPLIT_DEBUG_SYMBOLS=ON -DBUILD_STANDALONE_KEEPER=1 -DCMAKE_C_COMPILER=clang-18 -DCMAKE_CXX_COMPILER=clang++-18 -DCOMPILER_CACHE=sccache -DENABLE_BUILD_PROFILING=1 ..
+# binary release: cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_BUILD_TYPE=None -DSANITIZE= -DENABLE_CHECK_HEAVY_BUILDS=1 -DENABLE_CLICKHOUSE_SELF_EXTRACTING=1 -DCMAKE_C_COMPILER=clang-18 -DCMAKE_CXX_COMPILER=clang++-18 -DCOMPILER_CACHE=sccache -DENABLE_BUILD_PROFILING=1 ..
+# release coverage: cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_BUILD_TYPE=None -DSANITIZE= -DENABLE_CHECK_HEAVY_BUILDS=1 -DENABLE_CLICKHOUSE_SELF_EXTRACTING=1 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=ON -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_INSTALL_SYSCONFDIR=/etc -DCMAKE_INSTALL_LOCALSTATEDIR=/var -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON -DCMAKE_C_COMPILER=clang-18 -DCMAKE_CXX_COMPILER=clang++-18 -DSANITIZE_COVERAGE=1 -DBUILD_STANDALONE_KEEPER=0 -DCOMPILER_CACHE=sccache -DENABLE_BUILD_PROFILING=1 ..

 def main():
     args = parse_args()

     stop_watch = Utils.Stopwatch()
@ -65,30 +70,52 @@ def main():
     BUILD_TYPE = "RelWithDebInfo"
     SANITIZER = ""
-    AUX_DEFS = " -DENABLE_TESTS=0 "
+    AUX_DEFS = " -DENABLE_TESTS=1 "
+    cmake_cmd = None

     if "debug" in build_type:
         print("Build type set: debug")
         BUILD_TYPE = "Debug"
-        AUX_DEFS = " -DENABLE_TESTS=1 "
+        AUX_DEFS = " -DENABLE_TESTS=0 "
+        package_type = "debug"
     elif "release" in build_type:
         print("Build type set: release")
         AUX_DEFS = (
             " -DENABLE_TESTS=0 -DSPLIT_DEBUG_SYMBOLS=ON -DBUILD_STANDALONE_KEEPER=1 "
         )
+        package_type = "release"
     elif "asan" in build_type:
         print("Sanitizer set: address")
         SANITIZER = "address"
+        package_type = "asan"
+    elif "tsan" in build_type:
+        print("Sanitizer set: thread")
+        SANITIZER = "thread"
+        package_type = "tsan"
+    elif "msan" in build_type:
+        print("Sanitizer set: memory")
+        SANITIZER = "memory"
+        package_type = "msan"
+    elif "ubsan" in build_type:
+        print("Sanitizer set: undefined")
+        SANITIZER = "undefined"
+        package_type = "ubsan"
+    elif "binary" in build_type:
+        package_type = "binary"
+        cmake_cmd = f"cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_BUILD_TYPE=None -DSANITIZE= -DENABLE_CHECK_HEAVY_BUILDS=1 -DENABLE_CLICKHOUSE_SELF_EXTRACTING=1 -DCMAKE_C_COMPILER={ToolSet.COMPILER_C} -DCMAKE_CXX_COMPILER={ToolSet.COMPILER_CPP} -DCOMPILER_CACHE=sccache -DENABLE_BUILD_PROFILING=1 {Utils.cwd()}"
     else:
         assert False

-    cmake_cmd = CMAKE_CMD.format(
-        BUILD_TYPE=BUILD_TYPE,
-        CACHE_TYPE=CACHE_TYPE,
-        SANITIZER=SANITIZER,
-        AUX_DEFS=AUX_DEFS,
-        DIR=Utils.cwd(),
-    )
+    if not cmake_cmd:
+        cmake_cmd = CMAKE_CMD.format(
+            BUILD_TYPE=BUILD_TYPE,
+            CACHE_TYPE=CACHE_TYPE,
+            SANITIZER=SANITIZER,
+            AUX_DEFS=AUX_DEFS,
+            DIR=Utils.cwd(),
+            COMPILER=ToolSet.COMPILER_C,
+            COMPILER_CPP=ToolSet.COMPILER_CPP,
+        )

     build_dir = f"{Settings.TEMP_DIR}/build"
@ -98,7 +125,7 @@ def main():
     if res and JobStages.UNSHALLOW in stages:
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Repo Unshallow",
                 command="git rev-parse --is-shallow-repository | grep -q true && git fetch --depth 10000 --no-tags --filter=tree:0 origin $(git rev-parse --abbrev-ref HEAD)",
                 with_log=True,
@ -119,7 +146,7 @@ def main():
     if res and JobStages.CHECKOUT_SUBMODULES in stages:
         Shell.check(f"rm -rf {build_dir} && mkdir -p {build_dir}")
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Checkout Submodules",
                 command=f"git submodule sync --recursive && git submodule init && git submodule update --depth 1 --recursive --jobs {min([Utils.cpu_count(), 20])}",
             )
@ -128,7 +155,7 @@ def main():
     if res and JobStages.CMAKE in stages:
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Cmake configuration",
                 command=cmake_cmd,
                 workdir=build_dir,
@ -140,7 +167,7 @@ def main():
     if res and JobStages.BUILD in stages:
         Shell.check("sccache --show-stats")
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Build ClickHouse",
                 command="ninja clickhouse-bundle clickhouse-odbc-bridge clickhouse-library-bridge",
                 workdir=build_dir,
@ -149,18 +176,13 @@ def main():
         )
         Shell.check("sccache --show-stats")
         Shell.check(f"ls -l {build_dir}/programs/")
+        Shell.check(f"pwd")
+        Shell.check(f"find {build_dir} -name unit_tests_dbms")
+        Shell.check(f"find . -name unit_tests_dbms")
         res = results[-1].is_ok()

-    if res and JobStages.PACKAGE in stages:
-        if "debug" in build_type:
-            package_type = "debug"
-        elif "release" in build_type:
-            package_type = "release"
-        elif "asan" in build_type:
-            package_type = "asan"
-        else:
-            assert False, "TODO"
+    if res and JobStages.PACKAGE in stages and "binary" not in build_type:
+        assert package_type
         if "amd" in build_type:
             deb_arch = "amd64"
         else:
@ -170,7 +192,7 @@ def main():
         assert Shell.check(f"rm -f {output_dir}/*.deb")
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Build Packages",
                 command=[
                     f"DESTDIR={build_dir}/root ninja programs/install",
@ -183,6 +205,17 @@ def main():
         )
         res = results[-1].is_ok()

+    if res and JobStages.UNIT in stages and (SANITIZER or "binary" in build_type):
+        # TODO: parallel execution
+        results.append(
+            Result.from_gtest_run(
+                name="Unit Tests",
+                unit_tests_path=CIFiles.UNIT_TESTS_BIN,
+                with_log=False,
+            )
+        )
+        res = results[-1].is_ok()
+
     Result.create_from(results=results, stopwatch=stop_watch).complete_job()
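
The new UNIT stage above runs only for sanitizer and binary builds and delegates to Result.from_gtest_run (added to praktika's result.py later in this commit), which executes the gtest binary with a JSON report. A stand-alone sketch of the underlying invocation, with a hypothetical local path standing in for CIFiles.UNIT_TESTS_BIN:

import subprocess

GTEST_RESULT_FILE = "/tmp/praktika/gtest.json"  # same path ResultTranslator.GTEST_RESULT_FILE uses
unit_tests_bin = "./unit_tests_dbms"  # assumed path; the job reads it from CIFiles.UNIT_TESTS_BIN

# gtest writes the JSON report itself; the job then derives the Result status
# from that report rather than from the process exit code alone.
subprocess.run([unit_tests_bin, f"--gtest_output=json:{GTEST_RESULT_FILE}"], check=False)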


@ -1,3 +1,4 @@
+import argparse
 import math
 import multiprocessing
 import os
@ -245,8 +246,18 @@ def check_file_names(files):
     return ""


+def parse_args():
+    parser = argparse.ArgumentParser(description="ClickHouse Style Check Job")
+    # parser.add_argument("--param", help="Optional job start stage", default=None)
+    parser.add_argument("--test", help="Optional test name pattern", default="")
+    return parser.parse_args()
+
+
 if __name__ == "__main__":
     results = []
+    args = parse_args()
+    testpattern = args.test
     stop_watch = Utils.Stopwatch()

     all_files = Utils.traverse_paths(
@ -296,87 +307,111 @@ if __name__ == "__main__":
         )
     )

-    results.append(
-        run_check_concurrent(
-            check_name="Whitespace Check",
-            check_function=check_whitespaces,
-            files=cpp_files,
-        )
-    )
+    testname = "Whitespace Check"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            run_check_concurrent(
+                check_name=testname,
+                check_function=check_whitespaces,
+                files=cpp_files,
+            )
+        )

-    results.append(
-        run_check_concurrent(
-            check_name="YamlLint Check",
-            check_function=check_yamllint,
-            files=yaml_workflow_files,
-        )
-    )
+    testname = "YamlLint Check"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            run_check_concurrent(
+                check_name=testname,
+                check_function=check_yamllint,
+                files=yaml_workflow_files,
+            )
+        )

-    results.append(
-        run_check_concurrent(
-            check_name="XmlLint Check",
-            check_function=check_xmllint,
-            files=xml_files,
-        )
-    )
+    testname = "XmlLint Check"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            run_check_concurrent(
+                check_name=testname,
+                check_function=check_xmllint,
+                files=xml_files,
+            )
+        )

-    results.append(
-        run_check_concurrent(
-            check_name="Functional Tests scripts smoke check",
-            check_function=check_functional_test_cases,
-            files=functional_test_files,
-        )
-    )
+    testname = "Functional Tests scripts smoke check"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            run_check_concurrent(
+                check_name=testname,
+                check_function=check_functional_test_cases,
+                files=functional_test_files,
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check Tests Numbers",
-            command=check_gaps_in_tests_numbers,
-            command_args=[functional_test_files],
-        )
-    )
+    testname = "Check Tests Numbers"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_gaps_in_tests_numbers,
+                command_args=[functional_test_files],
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check Broken Symlinks",
-            command=check_broken_links,
-            command_kwargs={
-                "path": "./",
-                "exclude_paths": ["contrib/", "metadata/", "programs/server/data"],
-            },
-        )
-    )
+    testname = "Check Broken Symlinks"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_broken_links,
+                command_kwargs={
+                    "path": "./",
+                    "exclude_paths": ["contrib/", "metadata/", "programs/server/data"],
+                },
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check CPP code",
-            command=check_cpp_code,
-        )
-    )
+    testname = "Check CPP code"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_cpp_code,
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check Submodules",
-            command=check_repo_submodules,
-        )
-    )
+    testname = "Check Submodules"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_repo_submodules,
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check File Names",
-            command=check_file_names,
-            command_args=[all_files],
-        )
-    )
+    testname = "Check File Names"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_file_names,
+                command_args=[all_files],
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check Many Different Things",
-            command=check_other,
-        )
-    )
+    testname = "Check Many Different Things"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_other,
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check Codespell",
-            command=check_codespell,
-        )
-    )
+    testname = "Check Codespell"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_codespell,
+            )
+        )

-    results.append(
-        Result.create_from_command_execution(
-            name="Check Aspell",
-            command=check_aspell,
-        )
-    )
+    testname = "Check Aspell"
+    if testpattern.lower() in testname.lower():
+        results.append(
+            Result.from_commands_run(
+                name=testname,
+                command=check_aspell,
+            )
+        )

     Result.create_from(results=results, stopwatch=stop_watch).complete_job()
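
The effect of the new --test argument is that every check is now gated by a case-insensitive substring match on its name, so a single check can be run in isolation (e.g. python3 ci/jobs/check_style.py --test codespell); the default empty pattern matches every name and preserves the old run-everything behavior. A minimal sketch of the gating logic used above:

testpattern = "codespell"  # from --test codespell; defaults to ""
for testname in ("Whitespace Check", "Check Codespell", "Check Aspell"):
    if testpattern.lower() in testname.lower():
        print(f"would run: {testname}")  # only "Check Codespell" matches here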


@ -6,6 +6,7 @@ from praktika.utils import MetaClasses, Shell, Utils
 from ci.jobs.scripts.clickhouse_proc import ClickHouseProc
 from ci.jobs.scripts.functional_tests_results import FTResultsProcessor
+from ci.workflows.defs import ToolSet


 def clone_submodules():
@ -132,7 +133,7 @@ def main():
     if res and JobStages.CHECKOUT_SUBMODULES in stages:
         Shell.check(f"rm -rf {build_dir} && mkdir -p {build_dir}")
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Checkout Submodules",
                 command=clone_submodules,
             )
@ -141,10 +142,12 @@ def main():
     if res and JobStages.CMAKE in stages:
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Cmake configuration",
-                command=f"cmake {current_directory} -DCMAKE_CXX_COMPILER=clang++-18 -DCMAKE_C_COMPILER=clang-18 \
-                -DCMAKE_TOOLCHAIN_FILE={current_directory}/cmake/linux/toolchain-x86_64-musl.cmake -DENABLE_LIBRARIES=0 \
+                command=f"cmake {current_directory} -DCMAKE_CXX_COMPILER={ToolSet.COMPILER_CPP} \
+                -DCMAKE_C_COMPILER={ToolSet.COMPILER_C} \
+                -DCMAKE_TOOLCHAIN_FILE={current_directory}/cmake/linux/toolchain-x86_64-musl.cmake \
+                -DENABLE_LIBRARIES=0 \
                 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DENABLE_THINLTO=0 -DENABLE_NURAFT=1 -DENABLE_SIMDJSON=1 \
                 -DENABLE_JEMALLOC=1 -DENABLE_LIBURING=1 -DENABLE_YAML_CPP=1 -DCOMPILER_CACHE=sccache",
                 workdir=build_dir,
@ -156,7 +159,7 @@ def main():
     if res and JobStages.BUILD in stages:
         Shell.check("sccache --show-stats")
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Build ClickHouse",
                 command="ninja clickhouse-bundle clickhouse-stripped",
                 workdir=build_dir,
@ -176,7 +179,7 @@ def main():
             "clickhouse-test --help",
         ]
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Check and Compress binary",
                 command=commands,
                 workdir=build_dir,
@ -195,7 +198,7 @@ def main():
             update_path_ch_config,
         ]
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Install ClickHouse Config",
                 command=commands,
                 with_log=True,


@ -1,4 +1,5 @@
 import argparse
+import os
 import time
 from pathlib import Path
@ -109,7 +110,7 @@ def main():
             f"clickhouse-server --version",
         ]
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Install ClickHouse", command=commands, with_log=True
             )
         )
@ -153,6 +154,10 @@ def main():
         stop_watch_ = Utils.Stopwatch()
         step_name = "Tests"
         print(step_name)
+        # TODO: fix tests dependent on this and remove:
+        os.environ["CLICKHOUSE_TMP"] = "tests/queries/1_stateful"
         # assert Shell.check("clickhouse-client -q \"insert into system.zookeeper (name, path, value) values ('auxiliary_zookeeper2', '/test/chroot/', '')\"", verbose=True)
         run_test(
             no_parallel=no_parallel,


@ -118,7 +118,7 @@ def main():
             f"chmod +x /tmp/praktika/input/clickhouse-odbc-bridge",
         ]
         results.append(
-            Result.create_from_command_execution(
+            Result.from_commands_run(
                 name="Install ClickHouse", command=commands, with_log=True
             )
         )


@ -15,7 +15,7 @@
 LC_ALL="en_US.UTF-8"
 ROOT_PATH="."
 EXCLUDE='build/|integration/|widechar_width/|glibc-compatibility/|poco/|memcpy/|consistent-hashing|benchmark|tests/.*.cpp|utils/keeper-bench/example.yaml'
-EXCLUDE_DOCS='Settings\.cpp|FormatFactorySettingsDeclaration\.h'
+EXCLUDE_DOCS='Settings\.cpp|FormatFactorySettings\.h'

 # From [1]:
 # But since array_to_string_internal() in array.c still loops over array
@ -85,6 +85,8 @@ EXTERN_TYPES_EXCLUDES=(
     CurrentMetrics::add
     CurrentMetrics::sub
     CurrentMetrics::get
+    CurrentMetrics::getDocumentation
+    CurrentMetrics::getName
     CurrentMetrics::set
     CurrentMetrics::end
     CurrentMetrics::Increment


@ -8,12 +8,12 @@ from praktika.yaml_generator import YamlGenerator
 def create_parser():
-    parser = argparse.ArgumentParser(prog="python3 -m praktika")
+    parser = argparse.ArgumentParser(prog="praktika")
     subparsers = parser.add_subparsers(dest="command", help="Available subcommands")

     run_parser = subparsers.add_parser("run", help="Job Runner")
-    run_parser.add_argument("--job", help="Job Name", type=str, required=True)
+    run_parser.add_argument("job", help="Job Name", type=str)
     run_parser.add_argument(
         "--workflow",
         help="Workflow Name (required if job name is not uniq per config)",
@ -75,7 +75,8 @@ def create_parser():
     return parser


-if __name__ == "__main__":
+def main():
+    sys.path.append(".")
     parser = create_parser()
     args = parser.parse_args()
@ -120,3 +121,7 @@ if __name__ == "__main__":
     else:
         parser.print_help()
         sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
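
With the job name now positional, the CLI matches what the generated pr.yaml at the top of this commit actually invokes (python3 -m praktika run 'Job Name' --workflow "PR" --ci). A sketch of the resulting argument shape; treating --ci as a boolean switch is an assumption inferred from those invocations:

import argparse

parser = argparse.ArgumentParser(prog="praktika")
subparsers = parser.add_subparsers(dest="command", help="Available subcommands")
run_parser = subparsers.add_parser("run", help="Job Runner")
run_parser.add_argument("job", help="Job Name", type=str)  # previously: --job ..., required=True
run_parser.add_argument("--workflow", help="Workflow Name", default="")
run_parser.add_argument("--ci", action="store_true")  # assumed flag shape

args = parser.parse_args(["run", "Fast test", "--workflow", "PR", "--ci"])
assert args.job == "Fast test" and args.workflow == "PR" and args.ci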


@ -128,9 +128,6 @@ class HtmlRunnerHooks:
         for job in _workflow.jobs:
             if job.name not in skip_jobs:
                 result = Result.generate_pending(job.name)
-                # Preemptively add the general job log to the result directory to ensure
-                # the post-job handler can upload it, even if the job is terminated unexpectedly
-                result.set_files([Settings.RUN_LOG])
             else:
                 result = Result.generate_skipped(job.name, job_cache_records[job.name])
             results.append(result)


@ -529,7 +529,7 @@
         const columnSymbols = {
             name: '🗂️',
-            status: '🧾',
+            status: '',
             start_time: '🕒',
             duration: '⏳',
             info: '📝',


@ -68,9 +68,7 @@ def _update_workflow_with_native_jobs(workflow):
         print(f"Enable native job [{_docker_build_job.name}] for [{workflow.name}]")
         aux_job = copy.deepcopy(_docker_build_job)
         if workflow.enable_cache:
-            print(
-                f"Add automatic digest config for [{aux_job.name}] job since cache is enabled"
-            )
+            print(f"Add automatic digest config for [{aux_job.name}] job")
             docker_digest_config = Job.CacheDigestConfig()
             for docker_config in workflow.dockers:
                 docker_digest_config.include_paths.append(docker_config.path)


@ -1,5 +1,6 @@
 import dataclasses
 import datetime
+import json
 import sys
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Union
@ -80,12 +81,19 @@ class Result(MetaClasses.Serializable):
                 infos += info
         if results and not status:
             for result in results:
-                if result.status not in (Result.Status.SUCCESS, Result.Status.FAILED):
+                if result.status not in (
+                    Result.Status.SUCCESS,
+                    Result.Status.FAILED,
+                    Result.Status.ERROR,
+                ):
                     Utils.raise_with_error(
                         f"Unexpected result status [{result.status}] for Result.create_from call"
                     )
                 if result.status != Result.Status.SUCCESS:
                     result_status = Result.Status.FAILED
+                if result.status == Result.Status.ERROR:
+                    result_status = Result.Status.ERROR
+                    break
         if results:
             for result in results:
                 if result.info and with_info_from_results:
@ -166,17 +174,14 @@ class Result(MetaClasses.Serializable):
         return Result(**obj)

     def update_duration(self):
-        if not self.duration and self.start_time:
+        if self.duration:
+            return self
+        if self.start_time:
             self.duration = datetime.datetime.utcnow().timestamp() - self.start_time
         else:
-            if not self.duration:
-                print(
-                    f"NOTE: duration is set for job [{self.name}] Result - do not update by CI"
-                )
-            else:
-                print(
-                    f"NOTE: start_time is not set for job [{self.name}] Result - do not update duration"
-                )
+            print(
+                f"NOTE: start_time is not set for job [{self.name}] Result - do not update duration"
+            )
         return self

     def set_timing(self, stopwatch: Utils.Stopwatch):
@@ -250,7 +255,21 @@ class Result(MetaClasses.Serializable):
         )

     @classmethod
-    def create_from_command_execution(
+    def from_gtest_run(cls, name, unit_tests_path, with_log=False):
+        Shell.check(f"rm {ResultTranslator.GTEST_RESULT_FILE}")
+        result = Result.from_commands_run(
+            name=name,
+            command=[
+                f"{unit_tests_path} --gtest_output='json:{ResultTranslator.GTEST_RESULT_FILE}'"
+            ],
+            with_log=with_log,
+        )
+        status, results, info = ResultTranslator.from_gtest()
+        result.set_status(status).set_results(results).set_info(info)
+        return result
+
+    @classmethod
+    def from_commands_run(
         cls,
         name,
         command,
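
A hedged usage sketch for the new helper; the import path and the binary location (which matches CIFiles.UNIT_TESTS_BIN in ci/workflows/defs.py further down) are assumptions, not part of this hunk:

# Hedged usage sketch: run a gtest binary and translate its JSON report
# into a praktika Result; module and binary paths are assumed.
from praktika.result import Result

result = Result.from_gtest_run(
    name="Unit tests",
    unit_tests_path="/tmp/praktika/build/src/unit_tests_dbms",
    with_log=True,
)
result.dump()  # persist the translated statuses for the runner to pick up
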
@@ -507,10 +526,11 @@ class _ResultS3:
     #     return True

     @classmethod
-    def upload_result_files_to_s3(cls, result):
+    def upload_result_files_to_s3(cls, result, s3_subprefix=""):
+        s3_subprefix = "/".join([s3_subprefix, Utils.normalize_string(result.name)])
         if result.results:
             for result_ in result.results:
-                cls.upload_result_files_to_s3(result_)
+                cls.upload_result_files_to_s3(result_, s3_subprefix=s3_subprefix)
         for file in result.files:
             if not Path(file).is_file():
                 print(f"ERROR: Invalid file [{file}] in [{result.name}] - skip upload")
@@ -529,7 +549,7 @@ class _ResultS3:
                     file,
                     upload_to_s3=True,
                     text=is_text,
-                    s3_subprefix=Utils.normalize_string(result.name),
+                    s3_subprefix=s3_subprefix,
                 )
                 result.links.append(file_link)
         if result.files:
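
The recursion now threads s3_subprefix down the result tree, so a nested result's files land under the concatenated normalized names of all its ancestors instead of under the leaf name alone. A rough illustration of the accumulated prefixes (the exact Utils.normalize_string behavior is an assumption):

# Hedged illustration of prefix accumulation for a nested result tree,
# assuming normalization lowercases and replaces spaces with "_".
def prefixes(result_name, children, s3_subprefix=""):
    s3_subprefix = "/".join([s3_subprefix, result_name.lower().replace(" ", "_")])
    out = [s3_subprefix]
    for child in children:
        out += prefixes(child, [], s3_subprefix)
    return out

print(prefixes("Unit tests", ["TestSuite.CaseA", "TestSuite.CaseB"]))
# ['/unit_tests', '/unit_tests/testsuite.casea', '/unit_tests/testsuite.caseb']
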
@@ -572,3 +592,138 @@ class _ResultS3:
             return new_status
         else:
             return None
class ResultTranslator:
GTEST_RESULT_FILE = "/tmp/praktika/gtest.json"
@classmethod
def from_gtest(cls):
"""The json is described by the next proto3 scheme:
(It's wrong, but that's a copy/paste from
https://google.github.io/googletest/advanced.html#generating-a-json-report)
syntax = "proto3";
package googletest;
import "google/protobuf/timestamp.proto";
import "google/protobuf/duration.proto";
message UnitTest {
int32 tests = 1;
int32 failures = 2;
int32 disabled = 3;
int32 errors = 4;
google.protobuf.Timestamp timestamp = 5;
google.protobuf.Duration time = 6;
string name = 7;
repeated TestCase testsuites = 8;
}
message TestCase {
string name = 1;
int32 tests = 2;
int32 failures = 3;
int32 disabled = 4;
int32 errors = 5;
google.protobuf.Duration time = 6;
repeated TestInfo testsuite = 7;
}
message TestInfo {
string name = 1;
string file = 6;
int32 line = 7;
enum Status {
RUN = 0;
NOTRUN = 1;
}
Status status = 2;
google.protobuf.Duration time = 3;
string classname = 4;
message Failure {
string failures = 1;
string type = 2;
}
repeated Failure failures = 5;
}"""
test_results = [] # type: List[Result]
if not Path(cls.GTEST_RESULT_FILE).exists():
print(f"ERROR: No test result file [{cls.GTEST_RESULT_FILE}]")
return (
Result.Status.ERROR,
test_results,
f"No test result file [{cls.GTEST_RESULT_FILE}]",
)
with open(cls.GTEST_RESULT_FILE, "r", encoding="utf-8") as j:
report = json.load(j)
total_counter = report["tests"]
failed_counter = report["failures"]
error_counter = report["errors"]
description = ""
SEGFAULT = "Segmentation fault. "
SIGNAL = "Exit on signal. "
for suite in report["testsuites"]:
suite_name = suite["name"]
for test_case in suite["testsuite"]:
case_name = test_case["name"]
test_time = float(test_case["time"][:-1])
raw_logs = None
if "failures" in test_case:
raw_logs = ""
for failure in test_case["failures"]:
raw_logs += failure[Result.Status.FAILED]
if (
"Segmentation fault" in raw_logs # type: ignore
and SEGFAULT not in description
):
description += SEGFAULT
if (
"received signal SIG" in raw_logs # type: ignore
and SIGNAL not in description
):
description += SIGNAL
if test_case["status"] == "NOTRUN":
test_status = "SKIPPED"
elif raw_logs is None:
test_status = Result.Status.SUCCESS
else:
test_status = Result.Status.FAILED
test_results.append(
Result(
f"{suite_name}.{case_name}",
test_status,
duration=test_time,
info=raw_logs,
)
)
check_status = Result.Status.SUCCESS
tests_status = Result.Status.SUCCESS
tests_time = float(report["time"][:-1])
        if failed_counter:
            check_status = Result.Status.FAILED
            tests_status = Result.Status.FAILED
        if error_counter:
            check_status = Result.Status.ERROR
            tests_status = Result.Status.ERROR
test_results.append(Result(report["name"], tests_status, duration=tests_time))
if not description:
description += (
f"fail: {failed_counter + error_counter}, "
f"passed: {total_counter - failed_counter - error_counter}"
)
return (
check_status,
test_results,
description,
)
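
For orientation, a minimal report of the shape from_gtest consumes, trimmed to the fields the parser actually reads; the values are illustrative, and it is inferred from the raw_logs line above that Result.Status.FAILED equals gtest's "failure" key:

# Illustrative gtest JSON report (assumed field values, schema per the docstring).
sample_report = {
    "name": "AllTests",
    "tests": 2,
    "failures": 1,
    "errors": 0,
    "time": "0.5s",
    "testsuites": [
        {
            "name": "StorageTest",
            "testsuite": [
                {"name": "Read", "status": "RUN", "time": "0.1s"},
                {
                    "name": "Write",
                    "status": "RUN",
                    "time": "0.4s",
                    "failures": [{"failure": "expected 1, got 2", "type": ""}],
                },
            ],
        }
    ],
}
# Expected translation: StorageTest.Read -> success, StorageTest.Write -> failed,
# overall check status FAILED, description "fail: 1, passed: 1".
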

View File

@@ -86,6 +86,7 @@ class Runner:
         print("Read GH Environment")
         env = _Environment.from_env()
         env.JOB_NAME = job.name
+        os.environ["JOB_NAME"] = job.name
         env.dump()
         print(env)
@@ -200,13 +201,15 @@ class Runner:
                     ResultInfo.TIMEOUT
                 )
             elif result.is_running():
-                info = f"ERROR: Job terminated with an error, exit code [{exit_code}] - set status to [{Result.Status.ERROR}]"
+                info = f"ERROR: Job killed, exit code [{exit_code}] - set status to [{Result.Status.ERROR}]"
                 print(info)
                 result.set_status(Result.Status.ERROR).set_info(info)
+                result.set_files([Settings.RUN_LOG])
             else:
                 info = f"ERROR: Invalid status [{result.status}] for exit code [{exit_code}] - switch to [{Result.Status.ERROR}]"
                 print(info)
                 result.set_status(Result.Status.ERROR).set_info(info)
+                result.set_files([Settings.RUN_LOG])
             result.dump()
         return exit_code
@@ -257,10 +260,6 @@ class Runner:
                 info = f"ERROR: {ResultInfo.KILLED}"
                 print(info)
                 result.set_info(info).set_status(Result.Status.ERROR).dump()
-        else:
-            # TODO: add setting with different ways of storing general praktika log: always, on error, never.
-            # now let's store it on error only
-            result.files = [file for file in result.files if file != Settings.RUN_LOG]
         result.update_duration().dump()

View File

@@ -227,8 +227,8 @@ class Shell:
             proc = subprocess.Popen(
                 command,
                 shell=True,
-                stderr=subprocess.STDOUT,
                 stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
                 stdin=subprocess.PIPE if stdin_str else None,
                 universal_newlines=True,
                 start_new_session=True,  # Start a new process group for signal handling
@@ -248,11 +248,24 @@ class Shell:
                     proc.stdin.write(stdin_str)
                     proc.stdin.close()

-                # Process output in real-time
-                if proc.stdout:
-                    for line in proc.stdout:
-                        sys.stdout.write(line)
-                        log_fp.write(line)
+                # Process both stdout and stderr in real-time
+                def stream_output(stream, output_fp):
+                    for line in iter(stream.readline, ""):
+                        sys.stdout.write(line)
+                        output_fp.write(line)
+
+                stdout_thread = Thread(
+                    target=stream_output, args=(proc.stdout, log_fp)
+                )
+                stderr_thread = Thread(
+                    target=stream_output, args=(proc.stderr, log_fp)
+                )
+                stdout_thread.start()
+                stderr_thread.start()
+                stdout_thread.join()
+                stderr_thread.join()

                 proc.wait()  # Wait for the process to finish
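
With both streams on PIPE, a single-threaded reader can stall: the child blocks writing to one pipe whose OS buffer is full while the parent blocks reading the other. Draining each pipe on its own thread, as above, avoids that. A minimal standalone sketch of the same pattern (generic Python, not the praktika Shell API):

# Hedged sketch: drain stdout and stderr concurrently to avoid pipe deadlock.
import subprocess
import sys
from threading import Thread

proc = subprocess.Popen(
    "echo to-stdout; echo to-stderr 1>&2",
    shell=True,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    universal_newlines=True,
)

def drain(stream):
    # One reader thread per pipe keeps both buffers from filling up.
    for line in iter(stream.readline, ""):
        sys.stdout.write(line)

threads = [Thread(target=drain, args=(s,)) for s in (proc.stdout, proc.stderr)]
for t in threads:
    t.start()
for t in threads:
    t.join()
proc.wait()
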

View File

@@ -105,9 +105,9 @@ jobs:
          . /tmp/praktika_setup_env.sh
          set -o pipefail
          if command -v ts &> /dev/null; then
-            python3 -m praktika run --job '''{JOB_NAME}''' --workflow "{WORKFLOW_NAME}" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
+            python3 -m praktika run '''{JOB_NAME}''' --workflow "{WORKFLOW_NAME}" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
          else
-            python3 -m praktika run --job '''{JOB_NAME}''' --workflow "{WORKFLOW_NAME}" --ci |& tee /tmp/praktika/praktika_run.log
+            python3 -m praktika run '''{JOB_NAME}''' --workflow "{WORKFLOW_NAME}" --ci |& tee /tmp/praktika/praktika_run.log
          fi
          {UPLOADS_GITHUB}\
          """

View File

@@ -1,245 +0,0 @@
from praktika import Docker, Secret
S3_BUCKET_NAME = "clickhouse-builds"
S3_BUCKET_HTTP_ENDPOINT = "clickhouse-builds.s3.amazonaws.com"
class RunnerLabels:
CI_SERVICES = "ci_services"
CI_SERVICES_EBS = "ci_services_ebs"
BUILDER_AMD = "builder"
BUILDER_ARM = "builder-aarch64"
FUNC_TESTER_AMD = "func-tester"
FUNC_TESTER_ARM = "func-tester-aarch64"
BASE_BRANCH = "master"
azure_secret = Secret.Config(
name="azure_connection_string",
type=Secret.Type.AWS_SSM_VAR,
)
SECRETS = [
Secret.Config(
name="dockerhub_robot_password",
type=Secret.Type.AWS_SSM_VAR,
),
azure_secret,
# Secret.Config(
# name="woolenwolf_gh_app.clickhouse-app-id",
# type=Secret.Type.AWS_SSM_SECRET,
# ),
# Secret.Config(
# name="woolenwolf_gh_app.clickhouse-app-key",
# type=Secret.Type.AWS_SSM_SECRET,
# ),
]
DOCKERS = [
# Docker.Config(
# name="clickhouse/binary-builder",
# path="./ci/docker/packager/binary-builder",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/cctools",
# path="./ci/docker/packager/cctools",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/test-old-centos",
# path="./ci/docker/test/compatibility/centos",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/test-old-ubuntu",
# path="./ci/docker/test/compatibility/ubuntu",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/test-util",
# path="./ci/docker/test/util",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/integration-test",
# path="./ci/docker/test/integration/base",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/fuzzer",
# path="./ci/docker/test/fuzzer",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/performance-comparison",
# path="./ci/docker/test/performance-comparison",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
Docker.Config(
name="clickhouse/fasttest",
path="./ci/docker/fasttest",
platforms=Docker.Platforms.arm_amd,
depends_on=[],
),
# Docker.Config(
# name="clickhouse/test-base",
# path="./ci/docker/test/base",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-util"],
# ),
# Docker.Config(
# name="clickhouse/clickbench",
# path="./ci/docker/test/clickbench",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/keeper-jepsen-test",
# path="./ci/docker/test/keeper-jepsen",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/server-jepsen-test",
# path="./ci/docker/test/server-jepsen",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/sqllogic-test",
# path="./ci/docker/test/sqllogic",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/sqltest",
# path="./ci/docker/test/sqltest",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
Docker.Config(
name="clickhouse/stateless-test",
path="./ci/docker/stateless-test",
platforms=Docker.Platforms.arm_amd,
depends_on=[],
),
Docker.Config(
name="clickhouse/stateful-test",
path="./ci/docker/stateful-test",
platforms=Docker.Platforms.arm_amd,
depends_on=["clickhouse/stateless-test"],
),
# Docker.Config(
# name="clickhouse/stress-test",
# path="./ci/docker/test/stress",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/stateful-test"],
# ),
# Docker.Config(
# name="clickhouse/unit-test",
# path="./ci/docker/test/unit",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/integration-tests-runner",
# path="./ci/docker/test/integration/runner",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
Docker.Config(
name="clickhouse/style-test",
path="./ci/docker/style-test",
platforms=Docker.Platforms.arm_amd,
depends_on=[],
),
# Docker.Config(
# name="clickhouse/docs-builder",
# path="./ci/docker/docs/builder",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
]
# TODO:
# "docker/test/integration/s3_proxy": {
# "name": "clickhouse/s3-proxy",
# "dependent": []
# },
# "docker/test/integration/resolver": {
# "name": "clickhouse/python-bottle",
# "dependent": []
# },
# "docker/test/integration/helper_container": {
# "name": "clickhouse/integration-helper",
# "dependent": []
# },
# "docker/test/integration/mysql_golang_client": {
# "name": "clickhouse/mysql-golang-client",
# "dependent": []
# },
# "docker/test/integration/dotnet_client": {
# "name": "clickhouse/dotnet-client",
# "dependent": []
# },
# "docker/test/integration/mysql_java_client": {
# "name": "clickhouse/mysql-java-client",
# "dependent": []
# },
# "docker/test/integration/mysql_js_client": {
# "name": "clickhouse/mysql-js-client",
# "dependent": []
# },
# "docker/test/integration/mysql_php_client": {
# "name": "clickhouse/mysql-php-client",
# "dependent": []
# },
# "docker/test/integration/postgresql_java_client": {
# "name": "clickhouse/postgresql-java-client",
# "dependent": []
# },
# "docker/test/integration/kerberos_kdc": {
# "only_amd64": true,
# "name": "clickhouse/kerberos-kdc",
# "dependent": []
# },
# "docker/test/integration/kerberized_hadoop": {
# "only_amd64": true,
# "name": "clickhouse/kerberized-hadoop",
# "dependent": []
# },
# "docker/test/sqlancer": {
# "name": "clickhouse/sqlancer-test",
# "dependent": []
# },
# "docker/test/install/deb": {
# "name": "clickhouse/install-deb-test",
# "dependent": []
# },
# "docker/test/install/rpm": {
# "name": "clickhouse/install-rpm-test",
# "dependent": []
# },
# "docker/test/integration/nginx_dav": {
# "name": "clickhouse/nginx-dav",
# "dependent": []
# }
class JobNames:
STYLE_CHECK = "Style Check"
FAST_TEST = "Fast test"
BUILD = "Build"
STATELESS = "Stateless tests"
STATEFUL = "Stateful tests"
STRESS = "Stress tests"

View File

@@ -1,14 +1,13 @@
-from ci.settings.definitions import (
-    S3_BUCKET_HTTP_ENDPOINT,
-    S3_BUCKET_NAME,
-    RunnerLabels,
-)
+# aux settings:
+S3_BUCKET_NAME = "clickhouse-builds"
+S3_BUCKET_HTTP_ENDPOINT = "clickhouse-builds.s3.amazonaws.com"

+# praktika settings:
 MAIN_BRANCH = "master"
 S3_ARTIFACT_PATH = f"{S3_BUCKET_NAME}/artifacts"
-CI_CONFIG_RUNS_ON = [RunnerLabels.CI_SERVICES]
-DOCKER_BUILD_RUNS_ON = [RunnerLabels.CI_SERVICES_EBS]
+CI_CONFIG_RUNS_ON = ["ci_services"]
+DOCKER_BUILD_RUNS_ON = ["ci_services_ebs"]
 CACHE_S3_PATH = f"{S3_BUCKET_NAME}/ci_ch_cache"
 HTML_S3_PATH = f"{S3_BUCKET_NAME}/reports"
 S3_BUCKET_TO_HTTP_ENDPOINT = {S3_BUCKET_NAME: S3_BUCKET_HTTP_ENDPOINT}

17
ci/setup.py Normal file
View File

@@ -0,0 +1,17 @@
from setuptools import find_packages, setup
setup(
name="praktika",
version="0.1",
packages=find_packages(),
url="https://github.com/ClickHouse/praktika",
license="Apache 2.0",
author="Max Kainov",
author_email="max.kainov@clickhouse.com",
description="CI Infrastructure Toolbox",
entry_points={
"console_scripts": [
"praktika=praktika.__main__:main",
]
},
)

571
ci/workflows/defs.py Normal file
View File

@@ -0,0 +1,571 @@
from praktika import Artifact, Docker, Job, Secret
from praktika.settings import Settings
class RunnerLabels:
CI_SERVICES = "ci_services"
CI_SERVICES_EBS = "ci_services_ebs"
BUILDER_AMD = "builder"
BUILDER_ARM = "builder-aarch64"
FUNC_TESTER_AMD = "func-tester"
FUNC_TESTER_ARM = "func-tester-aarch64"
class CIFiles:
UNIT_TESTS_RESULTS = "/tmp/praktika/output/unit_tests_result.json"
UNIT_TESTS_BIN = "/tmp/praktika/build/src/unit_tests_dbms"
BASE_BRANCH = "master"
azure_secret = Secret.Config(
name="azure_connection_string",
type=Secret.Type.AWS_SSM_VAR,
)
SECRETS = [
Secret.Config(
name="dockerhub_robot_password",
type=Secret.Type.AWS_SSM_VAR,
),
azure_secret,
# Secret.Config(
# name="woolenwolf_gh_app.clickhouse-app-id",
# type=Secret.Type.AWS_SSM_SECRET,
# ),
# Secret.Config(
# name="woolenwolf_gh_app.clickhouse-app-key",
# type=Secret.Type.AWS_SSM_SECRET,
# ),
]
DOCKERS = [
Docker.Config(
name="clickhouse/binary-builder",
path="./ci/docker/binary-builder",
platforms=Docker.Platforms.arm_amd,
depends_on=["clickhouse/fasttest"],
),
# Docker.Config(
# name="clickhouse/cctools",
# path="./ci/docker/packager/cctools",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/test-old-centos",
# path="./ci/docker/test/compatibility/centos",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/test-old-ubuntu",
# path="./ci/docker/test/compatibility/ubuntu",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/test-util",
# path="./ci/docker/test/util",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
# Docker.Config(
# name="clickhouse/integration-test",
# path="./ci/docker/test/integration/base",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/fuzzer",
# path="./ci/docker/test/fuzzer",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/performance-comparison",
# path="./ci/docker/test/performance-comparison",
# platforms=Docker.Platforms.arm_amd,
# depends_on=[],
# ),
Docker.Config(
name="clickhouse/fasttest",
path="./ci/docker/fasttest",
platforms=Docker.Platforms.arm_amd,
depends_on=[],
),
# Docker.Config(
# name="clickhouse/test-base",
# path="./ci/docker/test/base",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-util"],
# ),
# Docker.Config(
# name="clickhouse/clickbench",
# path="./ci/docker/test/clickbench",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/keeper-jepsen-test",
# path="./ci/docker/test/keeper-jepsen",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/server-jepsen-test",
# path="./ci/docker/test/server-jepsen",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/sqllogic-test",
# path="./ci/docker/test/sqllogic",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/sqltest",
# path="./ci/docker/test/sqltest",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
Docker.Config(
name="clickhouse/stateless-test",
path="./ci/docker/stateless-test",
platforms=Docker.Platforms.arm_amd,
depends_on=[],
),
Docker.Config(
name="clickhouse/stateful-test",
path="./ci/docker/stateful-test",
platforms=Docker.Platforms.arm_amd,
depends_on=["clickhouse/stateless-test"],
),
# Docker.Config(
# name="clickhouse/stress-test",
# path="./ci/docker/test/stress",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/stateful-test"],
# ),
# Docker.Config(
# name="clickhouse/unit-test",
# path="./ci/docker/test/unit",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
# Docker.Config(
# name="clickhouse/integration-tests-runner",
# path="./ci/docker/test/integration/runner",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
Docker.Config(
name="clickhouse/style-test",
path="./ci/docker/style-test",
platforms=Docker.Platforms.arm_amd,
depends_on=[],
),
# Docker.Config(
# name="clickhouse/docs-builder",
# path="./ci/docker/docs/builder",
# platforms=Docker.Platforms.arm_amd,
# depends_on=["clickhouse/test-base"],
# ),
]
# TODO:
# "docker/test/integration/s3_proxy": {
# "name": "clickhouse/s3-proxy",
# "dependent": []
# },
# "docker/test/integration/resolver": {
# "name": "clickhouse/python-bottle",
# "dependent": []
# },
# "docker/test/integration/helper_container": {
# "name": "clickhouse/integration-helper",
# "dependent": []
# },
# "docker/test/integration/mysql_golang_client": {
# "name": "clickhouse/mysql-golang-client",
# "dependent": []
# },
# "docker/test/integration/dotnet_client": {
# "name": "clickhouse/dotnet-client",
# "dependent": []
# },
# "docker/test/integration/mysql_java_client": {
# "name": "clickhouse/mysql-java-client",
# "dependent": []
# },
# "docker/test/integration/mysql_js_client": {
# "name": "clickhouse/mysql-js-client",
# "dependent": []
# },
# "docker/test/integration/mysql_php_client": {
# "name": "clickhouse/mysql-php-client",
# "dependent": []
# },
# "docker/test/integration/postgresql_java_client": {
# "name": "clickhouse/postgresql-java-client",
# "dependent": []
# },
# "docker/test/integration/kerberos_kdc": {
# "only_amd64": true,
# "name": "clickhouse/kerberos-kdc",
# "dependent": []
# },
# "docker/test/integration/kerberized_hadoop": {
# "only_amd64": true,
# "name": "clickhouse/kerberized-hadoop",
# "dependent": []
# },
# "docker/test/sqlancer": {
# "name": "clickhouse/sqlancer-test",
# "dependent": []
# },
# "docker/test/install/deb": {
# "name": "clickhouse/install-deb-test",
# "dependent": []
# },
# "docker/test/install/rpm": {
# "name": "clickhouse/install-rpm-test",
# "dependent": []
# },
# "docker/test/integration/nginx_dav": {
# "name": "clickhouse/nginx-dav",
# "dependent": []
# }
class JobNames:
STYLE_CHECK = "Style Check"
FAST_TEST = "Fast test"
BUILD = "Build"
STATELESS = "Stateless tests"
STATEFUL = "Stateful tests"
STRESS = "Stress tests"
class ToolSet:
COMPILER_C = "clang-19"
COMPILER_CPP = "clang++-19"
class ArtifactNames:
CH_AMD_DEBUG = "CH_AMD_DEBUG"
CH_AMD_RELEASE = "CH_AMD_RELEASE"
CH_AMD_ASAN = "CH_AMD_ASAN"
CH_AMD_TSAN = "CH_AMD_TSAN"
CH_AMD_MSAN = "CH_AMD_MSAN"
CH_AMD_UBSAN = "CH_AMD_UBSAN"
CH_AMD_BINARY = "CH_AMD_BINARY"
CH_ARM_RELEASE = "CH_ARM_RELEASE"
CH_ARM_ASAN = "CH_ARM_ASAN"
CH_ODBC_B_AMD_DEBUG = "CH_ODBC_B_AMD_DEBUG"
CH_ODBC_B_AMD_RELEASE = "CH_ODBC_B_AMD_RELEASE"
CH_ODBC_B_AMD_ASAN = "CH_ODBC_B_AMD_ASAN"
CH_ODBC_B_AMD_TSAN = "CH_ODBC_B_AMD_TSAN"
CH_ODBC_B_AMD_MSAN = "CH_ODBC_B_AMD_MSAN"
CH_ODBC_B_AMD_UBSAN = "CH_ODBC_B_AMD_UBSAN"
CH_ODBC_B_ARM_RELEASE = "CH_ODBC_B_ARM_RELEASE"
CH_ODBC_B_ARM_ASAN = "CH_ODBC_B_ARM_ASAN"
UNITTEST_AMD_ASAN = "UNITTEST_AMD_ASAN"
UNITTEST_AMD_TSAN = "UNITTEST_AMD_TSAN"
UNITTEST_AMD_MSAN = "UNITTEST_AMD_MSAN"
UNITTEST_AMD_UBSAN = "UNITTEST_AMD_UBSAN"
UNITTEST_AMD_BINARY = "UNITTEST_AMD_BINARY"
DEB_AMD_DEBUG = "DEB_AMD_DEBUG"
DEB_AMD_RELEASE = "DEB_AMD_RELEASE"
DEB_AMD_ASAN = "DEB_AMD_ASAN"
DEB_AMD_TSAN = "DEB_AMD_TSAN"
DEB_AMD_MSAM = "DEB_AMD_MSAM"
DEB_AMD_UBSAN = "DEB_AMD_UBSAN"
DEB_ARM_RELEASE = "DEB_ARM_RELEASE"
DEB_ARM_ASAN = "DEB_ARM_ASAN"
ARTIFACTS = [
*Artifact.Config(
name="...",
type=Artifact.Type.S3,
path=f"{Settings.TEMP_DIR}/build/programs/clickhouse",
).parametrize(
names=[
ArtifactNames.CH_AMD_DEBUG,
ArtifactNames.CH_AMD_RELEASE,
ArtifactNames.CH_AMD_ASAN,
ArtifactNames.CH_AMD_TSAN,
ArtifactNames.CH_AMD_MSAN,
ArtifactNames.CH_AMD_UBSAN,
ArtifactNames.CH_AMD_BINARY,
ArtifactNames.CH_ARM_RELEASE,
ArtifactNames.CH_ARM_ASAN,
]
),
*Artifact.Config(
name="...",
type=Artifact.Type.S3,
path=f"{Settings.TEMP_DIR}/build/programs/clickhouse-odbc-bridge",
).parametrize(
names=[
ArtifactNames.CH_ODBC_B_AMD_DEBUG,
ArtifactNames.CH_ODBC_B_AMD_ASAN,
ArtifactNames.CH_ODBC_B_AMD_TSAN,
ArtifactNames.CH_ODBC_B_AMD_MSAN,
ArtifactNames.CH_ODBC_B_AMD_UBSAN,
ArtifactNames.CH_ODBC_B_AMD_RELEASE,
ArtifactNames.CH_ODBC_B_ARM_RELEASE,
ArtifactNames.CH_ODBC_B_ARM_ASAN,
]
),
# *Artifact.Config(
# name="...",
# type=Artifact.Type.S3,
# path=f"{Settings.TEMP_DIR}/build/src/unit_tests_dbms",
# ).parametrize(
# names=[
# ArtifactNames.UNITTEST_AMD_BINARY,
# ArtifactNames.UNITTEST_AMD_ASAN,
# ArtifactNames.UNITTEST_AMD_TSAN,
# ArtifactNames.UNITTEST_AMD_MSAN,
# ArtifactNames.UNITTEST_AMD_UBSAN,
# ]
# ),
*Artifact.Config(
name="*",
type=Artifact.Type.S3,
path=f"{Settings.TEMP_DIR}/output/*.deb",
).parametrize(
names=[
ArtifactNames.DEB_AMD_DEBUG,
ArtifactNames.DEB_AMD_ASAN,
ArtifactNames.DEB_AMD_TSAN,
ArtifactNames.DEB_AMD_MSAM,
ArtifactNames.DEB_AMD_UBSAN,
]
),
Artifact.Config(
name=ArtifactNames.DEB_AMD_RELEASE,
type=Artifact.Type.S3,
path=f"{Settings.TEMP_DIR}/output/*.deb",
),
Artifact.Config(
name=ArtifactNames.DEB_ARM_RELEASE,
type=Artifact.Type.S3,
path=f"{Settings.TEMP_DIR}/output/*.deb",
),
Artifact.Config(
name=ArtifactNames.DEB_ARM_ASAN,
type=Artifact.Type.S3,
path=f"{Settings.TEMP_DIR}/output/*.deb",
),
]
class Jobs:
style_check_job = Job.Config(
name=JobNames.STYLE_CHECK,
runs_on=[RunnerLabels.CI_SERVICES],
command="python3 ./ci/jobs/check_style.py",
run_in_docker="clickhouse/style-test",
)
fast_test_job = Job.Config(
name=JobNames.FAST_TEST,
runs_on=[RunnerLabels.BUILDER_AMD],
command="python3 ./ci/jobs/fast_test.py",
run_in_docker="clickhouse/fasttest",
digest_config=Job.CacheDigestConfig(
include_paths=[
"./ci/jobs/fast_test.py",
"./tests/queries/0_stateless/",
"./src",
],
),
)
build_jobs = Job.Config(
name=JobNames.BUILD,
runs_on=["...from params..."],
requires=[],
command="python3 ./ci/jobs/build_clickhouse.py --build-type {PARAMETER}",
run_in_docker="clickhouse/binary-builder",
timeout=3600 * 2,
digest_config=Job.CacheDigestConfig(
include_paths=[
"./src",
"./contrib/",
"./CMakeLists.txt",
"./PreLoad.cmake",
"./cmake",
"./base",
"./programs",
"./docker/packager/packager",
"./rust",
"./tests/ci/version_helper.py",
"./ci/jobs/build_clickhouse.py",
],
),
).parametrize(
parameter=[
"amd_debug",
"amd_release",
"amd_asan",
"amd_tsan",
"amd_msan",
"amd_ubsan",
"amd_binary",
"arm_release",
"arm_asan",
],
provides=[
[
ArtifactNames.CH_AMD_DEBUG,
ArtifactNames.DEB_AMD_DEBUG,
ArtifactNames.CH_ODBC_B_AMD_DEBUG,
],
[
ArtifactNames.CH_AMD_RELEASE,
ArtifactNames.DEB_AMD_RELEASE,
ArtifactNames.CH_ODBC_B_AMD_RELEASE,
],
[
ArtifactNames.CH_AMD_ASAN,
ArtifactNames.DEB_AMD_ASAN,
ArtifactNames.CH_ODBC_B_AMD_ASAN,
# ArtifactNames.UNITTEST_AMD_ASAN,
],
[
ArtifactNames.CH_AMD_TSAN,
ArtifactNames.DEB_AMD_TSAN,
ArtifactNames.CH_ODBC_B_AMD_TSAN,
# ArtifactNames.UNITTEST_AMD_TSAN,
],
[
ArtifactNames.CH_AMD_MSAN,
ArtifactNames.DEB_AMD_MSAM,
ArtifactNames.CH_ODBC_B_AMD_MSAN,
# ArtifactNames.UNITTEST_AMD_MSAN,
],
[
ArtifactNames.CH_AMD_UBSAN,
ArtifactNames.DEB_AMD_UBSAN,
ArtifactNames.CH_ODBC_B_AMD_UBSAN,
# ArtifactNames.UNITTEST_AMD_UBSAN,
],
[
ArtifactNames.CH_AMD_BINARY,
# ArtifactNames.UNITTEST_AMD_BINARY,
],
[
ArtifactNames.CH_ARM_RELEASE,
ArtifactNames.DEB_ARM_RELEASE,
ArtifactNames.CH_ODBC_B_ARM_RELEASE,
],
[
ArtifactNames.CH_ARM_ASAN,
ArtifactNames.DEB_ARM_ASAN,
ArtifactNames.CH_ODBC_B_ARM_ASAN,
],
],
runs_on=[
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.BUILDER_ARM],
[RunnerLabels.BUILDER_ARM],
],
)
stateless_tests_jobs = Job.Config(
name=JobNames.STATELESS,
runs_on=[RunnerLabels.BUILDER_AMD],
command="python3 ./ci/jobs/functional_stateless_tests.py --test-options {PARAMETER}",
# many tests expect to see "/var/lib/clickhouse" in various output lines - add mount for now, consider creating this dir in docker file
run_in_docker="clickhouse/stateless-test+--security-opt seccomp=unconfined",
digest_config=Job.CacheDigestConfig(
include_paths=[
"./ci/jobs/functional_stateless_tests.py",
],
),
).parametrize(
parameter=[
"amd_debug,parallel",
"amd_debug,non-parallel",
"amd_release,parallel",
"amd_release,non-parallel",
"arm_asan,parallel",
"arm_asan,non-parallel",
],
runs_on=[
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.FUNC_TESTER_AMD],
[RunnerLabels.BUILDER_AMD],
[RunnerLabels.FUNC_TESTER_AMD],
[RunnerLabels.BUILDER_ARM],
[RunnerLabels.FUNC_TESTER_ARM],
],
requires=[
[ArtifactNames.CH_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG],
[ArtifactNames.CH_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG],
[ArtifactNames.CH_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE],
[ArtifactNames.CH_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE],
[ArtifactNames.CH_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN],
[ArtifactNames.CH_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN],
],
)
stateful_tests_jobs = Job.Config(
name=JobNames.STATEFUL,
runs_on=[RunnerLabels.BUILDER_AMD],
command="python3 ./ci/jobs/functional_stateful_tests.py --test-options {PARAMETER}",
# many tests expect to see "/var/lib/clickhouse"
# some tests expect to see "/var/log/clickhouse"
run_in_docker="clickhouse/stateless-test+--security-opt seccomp=unconfined",
digest_config=Job.CacheDigestConfig(
include_paths=[
"./ci/jobs/functional_stateful_tests.py",
],
),
).parametrize(
parameter=[
"amd_release,parallel",
],
runs_on=[
[RunnerLabels.BUILDER_AMD],
],
requires=[
[ArtifactNames.CH_AMD_DEBUG],
],
)
# TODO: refactor job to be aligned with praktika style (remove wrappers, run in docker)
stress_test_jobs = Job.Config(
name=JobNames.STRESS,
runs_on=[RunnerLabels.BUILDER_ARM],
command="python3 ./tests/ci/stress_check.py {PARAMETER}",
digest_config=Job.CacheDigestConfig(
include_paths=[
"./ci/jobs/functional_stateful_tests.py",
],
),
).parametrize(
parameter=[
"arm_release",
],
runs_on=[
[RunnerLabels.FUNC_TESTER_ARM],
],
requires=[
[ArtifactNames.DEB_ARM_RELEASE],
],
)
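
Throughout this file, the parameter, provides, requires and runs_on arguments of parametrize are parallel lists that are presumably zipped element-wise, one concrete job per parameter. A plain-Python sketch of that assumed expansion (not praktika's actual implementation):

# Hedged sketch of the assumed parametrize expansion: the i-th entries of
# the parallel lists describe the i-th concrete job.
parameters = ["amd_debug", "amd_release", "arm_release"]
runs_on = [["builder"], ["builder"], ["builder-aarch64"]]
provides = [["CH_AMD_DEBUG"], ["CH_AMD_RELEASE"], ["CH_ARM_RELEASE"]]

for param, labels, artifacts in zip(parameters, runs_on, provides):
    print(f"Build ({param}) on {labels} -> provides {artifacts}")
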

View File

@@ -1,250 +1,20 @@
-from praktika import Artifact, Job, Workflow
-from praktika.settings import Settings
-
-from ci.settings.definitions import (
-    BASE_BRANCH,
-    DOCKERS,
-    SECRETS,
-    JobNames,
-    RunnerLabels,
-)
-
-
-class ArtifactNames:
-    CH_AMD_DEBUG = "CH_AMD_DEBUG"
-    CH_AMD_RELEASE = "CH_AMD_RELEASE"
-    CH_ARM_RELEASE = "CH_ARM_RELEASE"
-    CH_ARM_ASAN = "CH_ARM_ASAN"
-
-    CH_ODBC_B_AMD_DEBUG = "CH_ODBC_B_AMD_DEBUG"
-    CH_ODBC_B_AMD_RELEASE = "CH_ODBC_B_AMD_RELEASE"
-    CH_ODBC_B_ARM_RELEASE = "CH_ODBC_B_ARM_RELEASE"
-    CH_ODBC_B_ARM_ASAN = "CH_ODBC_B_ARM_ASAN"
-
-    DEB_AMD_DEBUG = "DEB_AMD_DEBUG"
-    DEB_AMD_RELEASE = "DEB_AMD_RELEASE"
-    DEB_ARM_RELEASE = "DEB_ARM_RELEASE"
-    DEB_ARM_ASAN = "DEB_ARM_ASAN"
-
-
-style_check_job = Job.Config(
-    name=JobNames.STYLE_CHECK,
-    runs_on=[RunnerLabels.CI_SERVICES],
-    command="python3 ./ci/jobs/check_style.py",
-    run_in_docker="clickhouse/style-test",
-)
-
-fast_test_job = Job.Config(
-    name=JobNames.FAST_TEST,
-    runs_on=[RunnerLabels.BUILDER_AMD],
-    command="python3 ./ci/jobs/fast_test.py",
-    run_in_docker="clickhouse/fasttest",
-    digest_config=Job.CacheDigestConfig(
-        include_paths=[
-            "./ci/jobs/fast_test.py",
-            "./tests/queries/0_stateless/",
-            "./src",
-        ],
-    ),
-)
-
-build_jobs = Job.Config(
-    name=JobNames.BUILD,
-    runs_on=["...from params..."],
-    requires=[],
-    command="python3 ./ci/jobs/build_clickhouse.py --build-type {PARAMETER}",
-    run_in_docker="clickhouse/fasttest",
-    timeout=3600 * 2,
-    digest_config=Job.CacheDigestConfig(
-        include_paths=[
-            "./src",
-            "./contrib/",
-            "./CMakeLists.txt",
-            "./PreLoad.cmake",
-            "./cmake",
-            "./base",
-            "./programs",
-            "./docker/packager/packager",
-            "./rust",
-            "./tests/ci/version_helper.py",
-            "./ci/jobs/build_clickhouse.py",
-        ],
-    ),
-).parametrize(
-    parameter=["amd_debug", "amd_release", "arm_release", "arm_asan"],
-    provides=[
-        [
-            ArtifactNames.CH_AMD_DEBUG,
-            ArtifactNames.DEB_AMD_DEBUG,
-            ArtifactNames.CH_ODBC_B_AMD_DEBUG,
-        ],
-        [
-            ArtifactNames.CH_AMD_RELEASE,
-            ArtifactNames.DEB_AMD_RELEASE,
-            ArtifactNames.CH_ODBC_B_AMD_RELEASE,
-        ],
-        [
-            ArtifactNames.CH_ARM_RELEASE,
-            ArtifactNames.DEB_ARM_RELEASE,
-            ArtifactNames.CH_ODBC_B_ARM_RELEASE,
-        ],
-        [
-            ArtifactNames.CH_ARM_ASAN,
-            ArtifactNames.DEB_ARM_ASAN,
-            ArtifactNames.CH_ODBC_B_ARM_ASAN,
-        ],
-    ],
-    runs_on=[
-        [RunnerLabels.BUILDER_AMD],
-        [RunnerLabels.BUILDER_AMD],
-        [RunnerLabels.BUILDER_ARM],
-        [RunnerLabels.BUILDER_ARM],
-    ],
-)
-
-stateless_tests_jobs = Job.Config(
-    name=JobNames.STATELESS,
-    runs_on=[RunnerLabels.BUILDER_AMD],
-    command="python3 ./ci/jobs/functional_stateless_tests.py --test-options {PARAMETER}",
-    # many tests expect to see "/var/lib/clickhouse" in various output lines - add mount for now, consider creating this dir in docker file
-    run_in_docker="clickhouse/stateless-test+--security-opt seccomp=unconfined",
-    digest_config=Job.CacheDigestConfig(
-        include_paths=[
-            "./ci/jobs/functional_stateless_tests.py",
-        ],
-    ),
-).parametrize(
-    parameter=[
-        "amd_debug,parallel",
-        "amd_debug,non-parallel",
-        "amd_release,parallel",
-        "amd_release,non-parallel",
-        "arm_asan,parallel",
-        "arm_asan,non-parallel",
-    ],
-    runs_on=[
-        [RunnerLabels.BUILDER_AMD],
-        [RunnerLabels.FUNC_TESTER_AMD],
-        [RunnerLabels.BUILDER_AMD],
-        [RunnerLabels.FUNC_TESTER_AMD],
-        [RunnerLabels.BUILDER_ARM],
-        [RunnerLabels.FUNC_TESTER_ARM],
-    ],
-    requires=[
-        [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG],
-        [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.CH_ODBC_B_AMD_DEBUG],
-        [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE],
-        [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.CH_ODBC_B_AMD_RELEASE],
-        [ArtifactNames.CH_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN],
-        [ArtifactNames.CH_ARM_ASAN, ArtifactNames.CH_ODBC_B_ARM_ASAN],
-    ],
-)
-
-stateful_tests_jobs = Job.Config(
-    name=JobNames.STATEFUL,
-    runs_on=[RunnerLabels.BUILDER_AMD],
-    command="python3 ./ci/jobs/functional_stateful_tests.py --test-options {PARAMETER}",
-    # many tests expect to see "/var/lib/clickhouse"
-    # some tests expect to see "/var/log/clickhouse"
-    run_in_docker="clickhouse/stateless-test+--security-opt seccomp=unconfined",
-    digest_config=Job.CacheDigestConfig(
-        include_paths=[
-            "./ci/jobs/functional_stateful_tests.py",
-        ],
-    ),
-).parametrize(
-    parameter=[
-        "amd_release,parallel",
-    ],
-    runs_on=[
-        [RunnerLabels.BUILDER_AMD],
-    ],
-    requires=[
-        [ArtifactNames.CH_AMD_DEBUG],
-    ],
-)
-
-# TODO: refactor job to be aligned with praktika style (remove wrappers, run in docker)
-stress_test_jobs = Job.Config(
-    name=JobNames.STRESS,
-    runs_on=[RunnerLabels.BUILDER_ARM],
-    command="python3 ./tests/ci/stress_check.py {PARAMETER}",
-    digest_config=Job.CacheDigestConfig(
-        include_paths=[
-            "./ci/jobs/functional_stateful_tests.py",
-        ],
-    ),
-).parametrize(
-    parameter=[
-        "arm_release",
-    ],
-    runs_on=[
-        [RunnerLabels.FUNC_TESTER_ARM],
-    ],
-    requires=[
-        [ArtifactNames.DEB_ARM_RELEASE],
-    ],
-)
-
+from praktika import Workflow
+
+from ci.workflows.defs import ARTIFACTS, BASE_BRANCH, DOCKERS, SECRETS, Jobs

 workflow = Workflow.Config(
     name="PR",
     event=Workflow.Event.PULL_REQUEST,
     base_branches=[BASE_BRANCH],
     jobs=[
-        style_check_job,
-        fast_test_job,
-        *build_jobs,
-        *stateless_tests_jobs,
-        *stateful_tests_jobs,
-        *stress_test_jobs,
-    ],
-    artifacts=[
-        *Artifact.Config(
-            name="...",
-            type=Artifact.Type.S3,
-            path=f"{Settings.TEMP_DIR}/build/programs/clickhouse",
-        ).parametrize(
-            names=[
-                ArtifactNames.CH_AMD_DEBUG,
-                ArtifactNames.CH_AMD_RELEASE,
-                ArtifactNames.CH_ARM_RELEASE,
-                ArtifactNames.CH_ARM_ASAN,
-            ]
-        ),
-        *Artifact.Config(
-            name="...",
-            type=Artifact.Type.S3,
-            path=f"{Settings.TEMP_DIR}/build/programs/clickhouse-odbc-bridge",
-        ).parametrize(
-            names=[
-                ArtifactNames.CH_ODBC_B_AMD_DEBUG,
-                ArtifactNames.CH_ODBC_B_AMD_RELEASE,
-                ArtifactNames.CH_ODBC_B_ARM_RELEASE,
-                ArtifactNames.CH_ODBC_B_ARM_ASAN,
-            ]
-        ),
-        Artifact.Config(
-            name=ArtifactNames.DEB_AMD_DEBUG,
-            type=Artifact.Type.S3,
-            path=f"{Settings.TEMP_DIR}/output/*.deb",
-        ),
-        Artifact.Config(
-            name=ArtifactNames.DEB_AMD_RELEASE,
-            type=Artifact.Type.S3,
-            path=f"{Settings.TEMP_DIR}/output/*.deb",
-        ),
-        Artifact.Config(
-            name=ArtifactNames.DEB_ARM_RELEASE,
-            type=Artifact.Type.S3,
-            path=f"{Settings.TEMP_DIR}/output/*.deb",
-        ),
-        Artifact.Config(
-            name=ArtifactNames.DEB_ARM_ASAN,
-            type=Artifact.Type.S3,
-            path=f"{Settings.TEMP_DIR}/output/*.deb",
-        ),
-    ],
+        Jobs.style_check_job,
+        Jobs.fast_test_job,
+        *Jobs.build_jobs,
+        *Jobs.stateless_tests_jobs,
+        *Jobs.stateful_tests_jobs,
+        *Jobs.stress_test_jobs,
+    ],
+    artifacts=ARTIFACTS,
     dockers=DOCKERS,
     secrets=SECRETS,
     enable_cache=True,
@@ -255,13 +25,3 @@ workflow = Workflow.Config(

 WORKFLOWS = [
     workflow,
 ]
-
-
-# if __name__ == "__main__":
-#     # local job test inside praktika environment
-#     from praktika.runner import Runner
-#     from praktika.digest import Digest
-#
-#     print(Digest().calc_job_digest(amd_debug_build_job))
-#
-#     Runner().run(workflow, fast_test_job, docker="fasttest", local_run=True)

View File

@@ -519,7 +519,7 @@ class JobReport:
             json.dump(asdict(self), json_file, default=path_converter, indent=2)

         # temporary WA to ease integration with praktika
-        check_name = os.getenv("CHECK_NAME", "")
+        check_name = os.getenv("JOB_NAME", "")
         if check_name:
             self.to_praktika_result(job_name=check_name).dump()
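
This pairs with the Runner change earlier in the commit (os.environ["JOB_NAME"] = job.name): the legacy JobReport bridge now keys off the job name praktika exports rather than the old CHECK_NAME variable. A minimal sketch of the handoff, with the report conversion reduced to a print (names match the diff, logic simplified):

# Hedged sketch of the env-var handoff between Runner and the legacy bridge.
import os

# Runner side: export the job name before the job's command runs.
os.environ["JOB_NAME"] = "Unit tests (asan)"

# JobReport side: pick it up when dumping the legacy report.
check_name = os.getenv("JOB_NAME", "")
if check_name:
    print(f"Converting JobReport to a praktika Result for [{check_name}]")
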