CI: packaging

Author: Max Kainov, 2024-11-14 23:05:42 +00:00
parent 4e56c026cd
commit 627d15c22d
12 changed files with 841 additions and 48 deletions

.github/workflows/pr.yaml (vendored, new file, 661 lines)

@@ -0,0 +1,661 @@
# generated by praktika
name: PR
on:
pull_request:
branches: ['master']
# Cancel the previous workflow run in PRs.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
GH_TOKEN: ${{ github.token }}
# Allow updating GH commit statuses and PR comments to post actual job report links
permissions: write-all
jobs:
config_workflow:
runs-on: [ci_services]
needs: []
name: "Config Workflow"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
docker_builds:
runs-on: [ci_services_ebs]
needs: [config_workflow]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIEJ1aWxkcw==') }}
name: "Docker Builds"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
style_check:
runs-on: [ci_services]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3R5bGUgQ2hlY2s=') }}
name: "Style Check"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
fast_test:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RmFzdCB0ZXN0') }}
name: "Fast test"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_debug:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9kZWJ1Zyk=') }}
name: "Build (amd_debug)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Build (amd_debug)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Build (amd_debug)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_amd_release:
runs-on: [builder]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFtZF9yZWxlYXNlKQ==') }}
name: "Build (amd_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Build (amd_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Build (amd_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_arm_release:
runs-on: [builder-aarch64]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9yZWxlYXNlKQ==') }}
name: "Build (arm_release)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Build (arm_release)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Build (arm_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
build_arm_asan:
runs-on: [builder-aarch64]
needs: [config_workflow, docker_builds]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgKGFybV9hc2FuKQ==') }}
name: "Build (arm_asan)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Build (arm_asan)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Build (arm_asan)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_debugparallel:
runs-on: [builder]
needs: [config_workflow, docker_builds, build_amd_debug]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcscGFyYWxsZWwp') }}
name: "Stateless tests (amd_debug,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_debugnon_parallel:
runs-on: [func-tester]
needs: [config_workflow, docker_builds, build_amd_debug]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsbm9uLXBhcmFsbGVsKQ==') }}
name: "Stateless tests (amd_debug,non-parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_releaseparallel:
runs-on: [builder]
needs: [config_workflow, docker_builds, build_amd_release]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxwYXJhbGxlbCk=') }}
name: "Stateless tests (amd_release,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_amd_releasenon_parallel:
runs-on: [func-tester]
needs: [config_workflow, docker_builds, build_amd_release]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxub24tcGFyYWxsZWwp') }}
name: "Stateless tests (amd_release,non-parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_arm_asanparallel:
runs-on: [builder-aarch64]
needs: [config_workflow, docker_builds, build_arm_asan]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixwYXJhbGxlbCk=') }}
name: "Stateless tests (arm_asan,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Stateless tests (arm_asan,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateless_tests_arm_asannon_parallel:
runs-on: [func-tester-aarch64]
needs: [config_workflow, docker_builds, build_arm_asan]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhcm1fYXNhbixub24tcGFyYWxsZWwp') }}
name: "Stateless tests (arm_asan,non-parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Stateless tests (arm_asan,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
stateful_tests_amd_debugparallel:
runs-on: [builder]
needs: [config_workflow, docker_builds, build_amd_debug]
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVmdWwgdGVzdHMgKGFtZF9kZWJ1ZyxwYXJhbGxlbCk=') }}
name: "Stateful tests (amd_debug,parallel)"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Stateful tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Stateful tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
finish_workflow:
runs-on: [ci_services]
needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, build_arm_release, build_arm_asan, stateless_tests_amd_debugparallel, stateless_tests_amd_debugnon_parallel, stateless_tests_amd_releaseparallel, stateless_tests_amd_releasenon_parallel, stateless_tests_arm_asanparallel, stateless_tests_arm_asannon_parallel, stateful_tests_amd_debugparallel]
if: ${{ !cancelled() }}
name: "Finish Workflow"
outputs:
data: ${{ steps.run.outputs.DATA }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
- name: Prepare env script
run: |
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
export PYTHONPATH=./ci:.
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
${{ needs.config_workflow.outputs.data }}
EOF
cat > /tmp/praktika/workflow_status.json << 'EOF'
${{ toJson(needs) }}
EOF
ENV_SETUP_SCRIPT_EOF
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
- name: Run
id: run
run: |
. /tmp/praktika_setup_env.sh
set -o pipefail
if command -v ts &> /dev/null; then
python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
else
python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
fi
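
The per-job "if:" conditions above skip a job when its base64-encoded name already appears in the cache_success_base64 field emitted by the "Config Workflow" job. A minimal sketch of that encoding, matching the literals used in this workflow (the rest of the cache layout is assumed to be praktika-internal):

    import base64

    # "Docker Builds" and "Style Check" are the job names used in the conditions above.
    assert base64.b64encode("Docker Builds".encode()).decode() == "RG9ja2VyIEJ1aWxkcw=="
    assert base64.b64encode("Style Check".encode()).decode() == "U3R5bGUgQ2hlY2s="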


@@ -105,5 +105,14 @@ RUN groupadd --system --gid 1000 clickhouse \
    && useradd --system --gid 1000 --uid 1000 -m clickhouse \
    && mkdir -p /.cache/sccache && chmod 777 /.cache/sccache

# TODO move nfpm to docker that will do packaging
ARG TARGETARCH
ARG NFPM_VERSION=2.20.0
RUN arch=${TARGETARCH:-amd64} \
    && curl -Lo /tmp/nfpm.deb "https://github.com/goreleaser/nfpm/releases/download/v${NFPM_VERSION}/nfpm_${arch}.deb" \
    && dpkg -i /tmp/nfpm.deb \
    && rm /tmp/nfpm.deb

ENV PYTHONPATH="/wd"
ENV PYTHONUNBUFFERED=1
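
The hunk above bakes nfpm (pinned via NFPM_VERSION) into the build image so the packaging stage can run inside it. A hedged pre-flight check a job could run before packaging; this check is not part of this commit:

    # Hypothetical sanity check: fail fast if the image was built without nfpm.
    from praktika.utils import Shell

    assert Shell.check("nfpm --version", verbose=True), "nfpm is not installed in the build image"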


@@ -4,11 +4,15 @@ from praktika.result import Result
from praktika.settings import Settings
from praktika.utils import MetaClasses, Shell, Utils

from ci.jobs.scripts.clickhouse_version import CHVersion


class JobStages(metaclass=MetaClasses.WithIter):
    CHECKOUT_SUBMODULES = "checkout"
    CMAKE = "cmake"
    UNSHALLOW = "unshallow"
    BUILD = "build"
    PACKAGE = "package"


def parse_args():
@@ -92,6 +96,24 @@ def main():
    res = True
    results = []

    if res and JobStages.UNSHALLOW in stages:
        results.append(
            Result.create_from_command_execution(
                name="Repo Unshallow",
                command="git rev-parse --is-shallow-repository | grep -q true && git fetch --filter=tree:0 --depth=5000 origin $(git rev-parse --abbrev-ref HEAD)",
                with_log=True,
            )
        )
        if results[-1].is_ok():
            try:
                version = CHVersion.get_version()
                print(f"Got version from repo [{version}]")
            except Exception as e:
                results[-1].set_failed().set_info(
                    f"Failed to retrieve version from repo: ex [{e}]"
                )
        res = results[-1].is_ok()

    if res and JobStages.CHECKOUT_SUBMODULES in stages:
        Shell.check(f"rm -rf {build_dir} && mkdir -p {build_dir}")
        results.append(

@@ -127,6 +149,32 @@ def main():
        Shell.check(f"ls -l {build_dir}/programs/")
        res = results[-1].is_ok()

    if res and JobStages.PACKAGE in stages:
        if "debug" in build_type:
            package_type = "debug"
        elif "release" in build_type:
            package_type = "release"
        elif "asan" in build_type:
            package_type = "asan"
        else:
            assert False, "TODO"
        output_dir = "/tmp/praktika/output/"
        assert Shell.check(f"rm -f {output_dir}/*.deb")
        results.append(
            Result.create_from_command_execution(
                name="Build Packages",
                command=[
                    f"DESTDIR={build_dir}/root ninja programs/install",
                    f"ln -sf {build_dir}/root {Utils.cwd()}/packages/root && cd {Utils.cwd()}/packages/ && OUTPUT_DIR={output_dir} BUILD_TYPE={package_type} VERSION_STRING={version} ./build --deb",
                ],
                workdir=build_dir,
                with_log=True,
            )
        )
        res = results[-1].is_ok()

    Result.create_from(results=results, stopwatch=stop_watch).complete_job()
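
The PACKAGE stage above derives the packaging BUILD_TYPE from the praktika build_type string with an if/elif chain. A small standalone sketch of that mapping (the helper name is hypothetical) for eyeballing the supported values:

    # Mirrors the if/elif chain in the PACKAGE stage; anything else is unsupported.
    def package_type_from_build_type(build_type: str) -> str:
        for candidate in ("debug", "release", "asan"):
            if candidate in build_type:
                return candidate
        raise ValueError(f"unsupported build type: {build_type}")

    assert package_type_from_build_type("amd_debug") == "debug"
    assert package_type_from_build_type("arm_asan") == "asan"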


@@ -0,0 +1,36 @@
from pathlib import Path

from praktika.utils import Shell


class CHVersion:
    FILE_WITH_VERSION_PATH = "./cmake/autogenerated_versions.txt"

    @classmethod
    def _get_tweak(cls):
        tag = Shell.get_output("git describe --tags --abbrev=0")
        assert tag.startswith("v24")
        num = Shell.get_output(f"git rev-list --count {tag}..HEAD")
        return int(num)

    @classmethod
    def get_version(cls):
        versions = {}
        for line in (
            Path(cls.FILE_WITH_VERSION_PATH).read_text(encoding="utf-8").splitlines()
        ):
            line = line.strip()
            if not line.startswith("SET("):
                continue
            name, value = line[4:-1].split(maxsplit=1)
            name = name.removeprefix("VERSION_").lower()
            try:
                value = int(value)
            except ValueError:
                pass
            versions[name] = value

        version_sha = versions["githash"]
        tweak = int(Shell.get_output(f"git rev-list --count {version_sha}..HEAD", verbose=True))
        return f"{versions['major']}.{versions['minor']}.{versions['patch']}.{tweak}"
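
A worked example of the SET(...) parsing in get_version above, assuming cmake/autogenerated_versions.txt carries lines such as SET(VERSION_MAJOR 24):

    line = "SET(VERSION_MAJOR 24)"
    name, value = line[4:-1].split(maxsplit=1)    # -> ("VERSION_MAJOR", "24")
    name = name.removeprefix("VERSION_").lower()  # -> "major"
    value = int(value)                            # -> 24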


@@ -179,7 +179,7 @@ class _Environment(MetaClasses.Serializable):
            if bucket in path:
                path = path.replace(bucket, endpoint)
                break
        REPORT_URL = f"https://{path}/{Path(settings.HTML_PAGE_FILE).name}?PR={self.PR_NUMBER}&sha={'latest' if latest else self.SHA}&name_0={urllib.parse.quote(self.WORKFLOW_NAME, safe='')}&name_1={urllib.parse.quote(self.JOB_NAME, safe='')}"
        REPORT_URL = f"https://{path}/{Path(settings.HTML_PAGE_FILE).name}?PR={self.PR_NUMBER}&sha={'latest' if latest else self.SHA}&name_0={urllib.parse.quote(self.WORKFLOW_NAME, safe='')}"
        return REPORT_URL

    def is_local_run(self):
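
With the trailing name_1 (job name) component removed, the generated link points at the workflow-level report rather than a single job. Illustrative shape only, with a hypothetical host and values:

    import urllib.parse

    # e.g. https://example-endpoint/ci.html?PR=123&sha=latest&name_0=PR
    print(f"https://example-endpoint/ci.html?PR=123&sha=latest&name_0={urllib.parse.quote('PR', safe='')}")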


@@ -137,14 +137,14 @@ class HtmlRunnerHooks:
        summary_result.start_time = Utils.timestamp()

        assert _ResultS3.copy_result_to_s3_with_version(summary_result, version=0)
        page_url = env.get_report_url(settings=Settings)
        page_url = env.get_report_url(settings=Settings, latest=True)
        print(f"CI Status page url [{page_url}]")

        res1 = GH.post_commit_status(
            name=_workflow.name,
            status=Result.Status.PENDING,
            description="",
            url=env.get_report_url(settings=Settings, latest=True),
            url=page_url,
        )
        res2 = GH.post_pr_comment(
            comment_body=f"Workflow [[{_workflow.name}]({page_url})], commit [{_Environment.get().SHA[:8]}]",


@@ -121,6 +121,9 @@ class Result(MetaClasses.Serializable):
    def set_success(self) -> "Result":
        return self.set_status(Result.Status.SUCCESS)

    def set_failed(self) -> "Result":
        return self.set_status(Result.Status.FAILED)

    def set_results(self, results: List["Result"]) -> "Result":
        self.results = results
        self.dump()


@@ -1,3 +1,5 @@
import glob
import json
import os
import re
import sys

@@ -58,6 +60,9 @@ class Runner:
            workflow_config.digest_dockers[docker.name] = Digest().calc_docker_digest(
                docker, workflow.dockers
            )

        # work around for old clickhouse jobs
        os.environ["DOCKER_TAG"] = json.dumps(workflow_config.digest_dockers)

        workflow_config.dump()

        Result.generate_pending(job.name).dump()

@@ -119,8 +124,21 @@ class Runner:
            else:
                prefixes = [env.get_s3_prefix()] * len(required_artifacts)
            for artifact, prefix in zip(required_artifacts, prefixes):
                s3_path = f"{Settings.S3_ARTIFACT_PATH}/{prefix}/{Utils.normalize_string(artifact._provided_by)}/{Path(artifact.path).name}"
                assert S3.copy_file_from_s3(s3_path=s3_path, local_path=Settings.INPUT_DIR)
                recursive = False
                include_pattern = ""
                if "*" in artifact.path:
                    s3_path = f"{Settings.S3_ARTIFACT_PATH}/{prefix}/{Utils.normalize_string(artifact._provided_by)}/"
                    recursive = True
                    include_pattern = Path(artifact.path).name
                    assert "*" in include_pattern
                else:
                    s3_path = f"{Settings.S3_ARTIFACT_PATH}/{prefix}/{Utils.normalize_string(artifact._provided_by)}/{Path(artifact.path).name}"
                assert S3.copy_file_from_s3(
                    s3_path=s3_path,
                    local_path=Settings.INPUT_DIR,
                    recursive=recursive,
                    include_pattern=include_pattern,
                )

        return 0
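
A worked example of the new wildcard branch above, assuming an artifact path like the *.deb glob declared later in this commit:

    from pathlib import Path

    artifact_path = "/tmp/praktika/output/*.deb"   # shape of the DEB_* artifact configs below
    include_pattern = Path(artifact_path).name     # -> "*.deb"
    assert "*" in include_pattern
    # the runner then downloads the whole provider prefix with
    # recursive=True and include_pattern="*.deb"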
@@ -262,10 +280,11 @@ class Runner:
                        f"ls -l {artifact.path}", verbose=True
                    ), f"Artifact {artifact.path} not found"
                    s3_path = f"{Settings.S3_ARTIFACT_PATH}/{env.get_s3_prefix()}/{Utils.normalize_string(env.JOB_NAME)}"
                    link = S3.copy_file_to_s3(
                        s3_path=s3_path, local_path=artifact.path
                    )
                    result.set_link(link)
                    for file_path in glob.glob(artifact.path):
                        link = S3.copy_file_to_s3(
                            s3_path=s3_path, local_path=file_path
                        )
                        result.set_link(link)
                except Exception as e:
                    error = (
                        f"ERROR: Failed to upload artifact [{artifact}], ex [{e}]"


@@ -117,15 +117,21 @@ class S3:
        return res

    @classmethod
    def copy_file_from_s3(cls, s3_path, local_path):
    def copy_file_from_s3(
        cls, s3_path, local_path, recursive=False, include_pattern=""
    ):
        assert Path(s3_path), f"Invalid S3 Path [{s3_path}]"
        if Path(local_path).is_dir():
            local_path = Path(local_path) / Path(s3_path).name
            pass
        else:
            assert Path(
                local_path
            ).parent.is_dir(), f"Parent path for [{local_path}] does not exist"
        cmd = f"aws s3 cp s3://{s3_path} {local_path}"
        if recursive:
            cmd += " --recursive"
        if include_pattern:
            cmd += f" --include {include_pattern}"
        res = cls.run_command_with_retries(cmd)
        return res
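
A usage sketch for the extended signature, mirroring the runner call above; the bucket and prefix are placeholders, and the import path for S3 is assumed:

    from praktika.s3 import S3          # import path assumed
    from praktika.settings import Settings

    S3.copy_file_from_s3(
        s3_path="some-bucket/artifacts/build_amd_debug/",  # placeholder prefix
        local_path=Settings.INPUT_DIR,
        recursive=True,
        include_pattern="*.deb",
    )
    # resulting command, roughly:
    #   aws s3 cp s3://some-bucket/artifacts/build_amd_debug/ <INPUT_DIR> --recursive --include *.deb
    # note: the aws CLI applies --include as a re-include after --exclude filters,
    # so the effect of --include on its own is worth double-checking.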


@@ -16,6 +16,11 @@ class ArtifactNames:
    CH_ARM_RELEASE = "CH_ARM_RELEASE"
    CH_ARM_ASAN = "CH_ARM_ASAN"

    DEB_AMD_DEBUG = "DEB_AMD_DEBUG"
    DEB_AMD_RELEASE = "DEB_AMD_RELEASE"
    DEB_ARM_RELEASE = "DEB_ARM_RELEASE"
    DEB_ARM_ASAN = "DEB_ARM_ASAN"


style_check_job = Job.Config(
    name=JobNames.STYLE_CHECK,

@@ -41,7 +46,7 @@ fast_test_job = Job.Config(
build_jobs = Job.Config(
    name=JobNames.BUILD,
    runs_on=["...from params..."],
    requires=[JobNames.FAST_TEST],
    requires=[],
    command="python3 ./ci/jobs/build_clickhouse.py --build-type {PARAMETER}",
    run_in_docker="clickhouse/fasttest",
    timeout=3600 * 2,

@@ -63,10 +68,10 @@ build_jobs = Job.Config(
).parametrize(
    parameter=["amd_debug", "amd_release", "arm_release", "arm_asan"],
    provides=[
        [ArtifactNames.CH_AMD_DEBUG],
        [ArtifactNames.CH_AMD_RELEASE],
        [ArtifactNames.CH_ARM_RELEASE],
        [ArtifactNames.CH_ARM_ASAN],
        [ArtifactNames.CH_AMD_DEBUG, ArtifactNames.DEB_AMD_DEBUG],
        [ArtifactNames.CH_AMD_RELEASE, ArtifactNames.DEB_AMD_RELEASE],
        [ArtifactNames.CH_ARM_RELEASE, ArtifactNames.DEB_ARM_RELEASE],
        [ArtifactNames.CH_ARM_ASAN, ArtifactNames.DEB_ARM_ASAN],
    ],
    runs_on=[
        [RunnerLabels.BUILDER_AMD],

@@ -170,6 +175,26 @@ workflow = Workflow.Config(
            type=Artifact.Type.S3,
            path=f"{Settings.TEMP_DIR}/build/programs/clickhouse",
        ),
        Artifact.Config(
            name=ArtifactNames.DEB_AMD_DEBUG,
            type=Artifact.Type.S3,
            path=f"{Settings.TEMP_DIR}/output/*.deb",
        ),
        Artifact.Config(
            name=ArtifactNames.DEB_AMD_RELEASE,
            type=Artifact.Type.S3,
            path=f"{Settings.TEMP_DIR}/output/*.deb",
        ),
        Artifact.Config(
            name=ArtifactNames.DEB_ARM_RELEASE,
            type=Artifact.Type.S3,
            path=f"{Settings.TEMP_DIR}/output/*.deb",
        ),
        Artifact.Config(
            name=ArtifactNames.DEB_ARM_ASAN,
            type=Artifact.Type.S3,
            path=f"{Settings.TEMP_DIR}/output/*.deb",
        ),
    ],
    dockers=DOCKERS,
    secrets=SECRETS,
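
All four DEB_* artifacts declare the same *.deb glob; the runner tells them apart by the providing build job, encoded in the S3 prefix via artifact._provided_by (see the runner change above). As a hedged illustration only, and assuming Job.Config.requires also accepts artifact names the way the runner's required_artifacts handling suggests, a downstream consumer might look like this (job name and script are hypothetical and not part of this commit):

    install_check_job = Job.Config(
        name="Install check (amd_release)",            # hypothetical job
        runs_on=[RunnerLabels.BUILDER_AMD],
        command="python3 ./ci/jobs/install_check.py",  # hypothetical script
        requires=[ArtifactNames.DEB_AMD_RELEASE],
    )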


@@ -5,24 +5,14 @@ set -e
# Avoid dependency on locale
LC_ALL=C

# Normalize output directory
if [ -n "$OUTPUT_DIR" ]; then
  OUTPUT_DIR=$(realpath -m "$OUTPUT_DIR")
fi

CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
cd "$CUR_DIR"

ROOT_DIR=$(readlink -f "$(git rev-parse --show-cdup)")
PKG_ROOT='root'

DEB_ARCH=${DEB_ARCH:-amd64}
OUTPUT_DIR=${OUTPUT_DIR:-$ROOT_DIR}
[ -d "${OUTPUT_DIR}" ] || mkdir -p "${OUTPUT_DIR}"
SANITIZER=${SANITIZER:-""}
SOURCE=${SOURCE:-$PKG_ROOT}

cd "$(dirname "${BASH_SOURCE[0]}")"

HELP="${0} [--test] [--rpm] [-h|--help]
  --test - adds '+test' prefix to version
  --apk - build APK packages

@@ -40,12 +30,7 @@ Used envs:
  VERSION_STRING='${VERSION_STRING}' - the package version to overwrite
"

if [ -z "${VERSION_STRING}" ]; then
  # Get CLICKHOUSE_VERSION_STRING from the current git repo
  eval "$("$ROOT_DIR/tests/ci/version_helper.py" -e)"
else
  CLICKHOUSE_VERSION_STRING=${VERSION_STRING}
fi
CLICKHOUSE_VERSION_STRING=${VERSION_STRING}

export CLICKHOUSE_VERSION_STRING

@@ -144,31 +129,32 @@ CLICKHOUSE_VERSION_STRING+=$VERSION_POSTFIX
echo -e "\nCurrent version is $CLICKHOUSE_VERSION_STRING"

for config in clickhouse*.yaml; do
  if [[ $BUILD_TYPE != 'release' ]] && [[ "$config" == "clickhouse-keeper-dbg.yaml" ]]; then
    continue
  fi
  if [ -n "$MAKE_DEB" ] || [ -n "$MAKE_TGZ" ]; then
    echo "Building deb package for $config"

    # Preserve package path
    exec 9>&1
    PKG_PATH=$(nfpm package --target "$OUTPUT_DIR" --config "$config" --packager deb | tee /dev/fd/9)
    PKG_PATH=${PKG_PATH##*created package: }
    exec 9>&-
    nfpm package --target "$OUTPUT_DIR" --config "$config" --packager deb
  fi

  if [ -n "$MAKE_APK" ]; then
    echo "Building apk package for $config"
    nfpm package --target "$OUTPUT_DIR" --config "$config" --packager apk
    echo "Building apk package for $config"
    nfpm package --target "$OUTPUT_DIR" --config "$config" --packager apk
  fi
  if [ -n "$MAKE_ARCHLINUX" ]; then
    echo "Building archlinux package for $config"
    nfpm package --target "$OUTPUT_DIR" --config "$config" --packager archlinux
    echo "Building archlinux package for $config"
    nfpm package --target "$OUTPUT_DIR" --config "$config" --packager archlinux
  fi
  if [ -n "$MAKE_RPM" ]; then
    echo "Building rpm package for $config"
    nfpm package --target "$OUTPUT_DIR" --config "$config" --packager rpm
    echo "Building rpm package for $config"
    nfpm package --target "$OUTPUT_DIR" --config "$config" --packager rpm
  fi
  if [ -n "$MAKE_TGZ" ]; then
    echo "Building tarball for $config"
    deb2tgz "$PKG_PATH"
    echo "Building tarball for $config"
    deb2tgz "$PKG_PATH"
  fi
done
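
For reference, the new PACKAGE stage in ci/jobs/build_clickhouse.py drives this script roughly as follows; this is a simplified sketch of the Build Packages command shown earlier in this commit, with an illustrative version string and output directory:

    from praktika.utils import Shell, Utils

    Shell.check(
        f"cd {Utils.cwd()}/packages && OUTPUT_DIR=/tmp/praktika/output "
        "BUILD_TYPE=release VERSION_STRING=24.11.1.1 ./build --deb",
        verbose=True,
    )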