Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-11-22 07:31:57 +00:00

Merge pull request #56501 from ClickHouse/reusable-tests

Continue rewriting workflows to reusable tests

This commit: e437d3ae58

.github/actions/common_setup/action.yml (2 lines changed)
@@ -19,6 +19,8 @@ runs:
       cat >> "$GITHUB_ENV" << 'EOF'
         TEMP_PATH=${{runner.temp}}/${{inputs.job_type}}
         REPO_COPY=${{runner.temp}}/${{inputs.job_type}}/git-repo-copy
+        IMAGES_PATH=${{runner.temp}}/images_path
+        REPORTS_PATH=${{runner.temp}}/reports_dir
       EOF
       if [ -z "${{env.GITHUB_JOB_OVERRIDDEN}}" ] && [ "true" == "${{inputs.nested_job}}" ]; then
         echo "The GITHUB_JOB_OVERRIDDEN ENV is unset, and must be set for the nested jobs"
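For orientation, a job in one of the reusable workflows consumes this composite action roughly as in the sketch below. The step itself is taken from the reusable_test.yml added later in this diff; the comments are explanatory and not part of the change. After the action runs, TEMP_PATH, REPO_COPY and the newly added IMAGES_PATH and REPORTS_PATH are available to later steps through $GITHUB_ENV.

    # Sketch of a caller step (see .github/workflows/reusable_test.yml below)
    - name: Common setup
      uses: ./.github/actions/common_setup
      with:
        job_type: test   # TEMP_PATH and REPO_COPY are derived from this value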
.github/workflows/backport_branches.yml (347 lines changed)
@@ -105,66 +105,22 @@ jobs:
           path: ${{ runner.temp }}/changed_images.json
   CompatibilityCheckX86:
     needs: [BuilderDebRelease]
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/compatibility_check
-          REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          EOF
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: CompatibilityCheckX86
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Compatibility check X86
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
   CompatibilityCheckAarch64:
     needs: [BuilderDebAarch64]
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/compatibility_check
-          REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          EOF
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: CompatibilityCheckAarch64
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Compatibility check X86
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
   #########################################################################################
   #################################### ORDINARY BUILDS ####################################
   #########################################################################################
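The conversion pattern above repeats for every test job in this PR: the per-job boilerplate (Set envs, checkout, artifact download, the run step, cleanup) moves into reusable_test.yml, and the caller declares only what is job specific. A minimal caller looks roughly like the sketch below; the job name ExampleCheck is illustrative and not from this PR, and the check name is assumed to exist in tests/ci/ci_config.py.

    ExampleCheck:                            # hypothetical job name
      needs: [BuilderDebRelease]
      uses: ./.github/workflows/reusable_test.yml
      with:
        test_name: Compatibility check X86   # exported to the script as $CHECK_NAME
        runner_type: style-checker           # label of the self-hosted runner pool
        run_command: |
          cd "$REPO_COPY/tests/ci"
          python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc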
@@ -239,303 +195,114 @@ jobs:
 ##################################### BUILD REPORTER #######################################
 ############################################################################################
   BuilderReport:
+    if: ${{ success() || failure() }}
     needs:
       - BuilderDebRelease
       - BuilderDebAarch64
       - BuilderDebAsan
       - BuilderDebTsan
       - BuilderDebDebug
-    runs-on: [self-hosted, style-checker]
-    if: ${{ success() || failure() }}
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          CHECK_NAME=ClickHouse build check
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          TEMP_PATH=${{runner.temp}}/report_check
-          NEEDS_DATA_PATH=${{runner.temp}}/needs.json
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Report Builder
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cat > "$NEEDS_DATA_PATH" << 'EOF'
-          ${{ toJSON(needs) }}
-          EOF
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 build_report_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: ClickHouse build check
+      runner_type: style-checker
+      additional_envs: |
+        NEEDS_DATA<<NDENV
+        ${{ toJSON(needs) }}
+        NDENV
+      run_command: |
+        cd "$GITHUB_WORKSPACE/tests/ci"
+        python3 build_report_check.py "$CHECK_NAME"
   BuilderSpecialReport:
+    if: ${{ success() || failure() }}
     needs:
       - BuilderBinDarwin
       - BuilderBinDarwinAarch64
-    runs-on: [self-hosted, style-checker]
-    if: ${{ success() || failure() }}
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/report_check
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=ClickHouse special build check
-          NEEDS_DATA_PATH=${{runner.temp}}/needs.json
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Report Builder
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cat > "$NEEDS_DATA_PATH" << 'EOF'
-          ${{ toJSON(needs) }}
-          EOF
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 build_report_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: ClickHouse special build check
+      runner_type: style-checker
+      additional_envs: |
+        NEEDS_DATA<<NDENV
+        ${{ toJSON(needs) }}
+        NDENV
+      run_command: |
+        cd "$GITHUB_WORKSPACE/tests/ci"
+        python3 build_report_check.py "$CHECK_NAME"
 ############################################################################################
 #################################### INSTALL PACKAGES ######################################
 ############################################################################################
   InstallPackagesTestRelease:
     needs: [BuilderDebRelease]
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/test_install
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Install packages (amd64)
-          REPO_COPY=${{runner.temp}}/test_install/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Test packages installation
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 install_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Install packages (amd64)
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 install_check.py "$CHECK_NAME"
   InstallPackagesTestAarch64:
     needs: [BuilderDebAarch64]
-    runs-on: [self-hosted, style-checker-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/test_install
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Install packages (arm64)
-          REPO_COPY=${{runner.temp}}/test_install/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Test packages installation
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 install_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Install packages (arm64)
+      runner_type: style-checker-aarch64
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 install_check.py "$CHECK_NAME"
 ##############################################################################################
 ########################### FUNCTIONAl STATELESS TESTS #######################################
 ##############################################################################################
   FunctionalStatelessTestAsan:
     needs: [BuilderDebAsan]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests (asan)
-          REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
-          KILL_TIMEOUT=10800
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Stateless tests (asan)
+      runner_type: func-tester
+      additional_envs: |
+        KILL_TIMEOUT=10800
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
 ##############################################################################################
 ############################ FUNCTIONAl STATEFUL TESTS #######################################
 ##############################################################################################
   FunctionalStatefulTestDebug:
     needs: [BuilderDebDebug]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateful_debug
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateful tests (debug)
-          REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse
-          KILL_TIMEOUT=3600
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Functional test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Stateful tests (debug)
+      runner_type: func-tester
+      additional_envs: |
+        KILL_TIMEOUT=3600
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
 ##############################################################################################
 ######################################### STRESS TESTS #######################################
 ##############################################################################################
   StressTestTsan:
     needs: [BuilderDebTsan]
-    # func testers have 16 cores + 128 GB memory
-    # while stress testers have 36 cores + 72 memory
-    # It would be better to have something like 32 + 128,
-    # but such servers almost unavailable as spot instances.
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stress_thread
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stress test (tsan)
-          REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Stress test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 stress_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Stress test (tsan)
+      runner_type: stress-tester
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 stress_check.py "$CHECK_NAME"
 #############################################################################################
 ############################# INTEGRATION TESTS #############################################
 #############################################################################################
   IntegrationTestsRelease:
     needs: [BuilderDebRelease]
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_release
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests (release)
-          REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Integration tests (release)
+      runner_type: stress-tester
+      batches: 4
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 integration_test_check.py "$CHECK_NAME"
   FinishCheck:
     needs:
       - DockerHubPush
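The build-report jobs need the JSON of their `needs` context. With the reusable workflow there is no longer a dedicated step that writes needs.json, so the callers above push the data through additional_envs using a custom heredoc delimiter (NDENV), which $GITHUB_ENV treats as a multi-line value. A trimmed sketch of just that mechanism, with the delimiter name taken from the diff above:

    with:
      additional_envs: |
        NEEDS_DATA<<NDENV
        ${{ toJSON(needs) }}
        NDENV

Inside the reusable job these lines are appended verbatim to $GITHUB_ENV, so build_report_check.py can read the same data from the NEEDS_DATA environment variable instead of the old NEEDS_DATA_PATH file (see the two-step migration in the Python changes further down).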
.github/workflows/docs_check.yml (70 lines changed)
@@ -96,68 +96,30 @@ jobs:
           path: ${{ runner.temp }}/changed_images.json
   StyleCheck:
     needs: DockerHubPush
-    runs-on: [self-hosted, style-checker]
-    if: ${{ success() || failure() }}
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{ runner.temp }}/style_check
-          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
-          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
-          RCSK
-          EOF
-      - name: Download changed images
-        # even if artifact does not exist, e.g. on `do not test` label or failed Docker job
-        continue-on-error: true
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ env.TEMP_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Style Check
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 style_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    # We need additional `&& ! cancelled()` to have the job being able to cancel
+    if: ${{ success() || failure() || ( always() && ! cancelled() ) }}
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Style check
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 style_check.py
+    secrets:
+      secret_envs: |
+        ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
+        ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
+        RCSK
   DocsCheck:
     needs: DockerHubPush
-    runs-on: [self-hosted, func-tester-aarch64]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/docs_check
-          REPO_COPY=${{runner.temp}}/docs_check/ClickHouse
-          EOF
-      - name: Download changed images
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ env.TEMP_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: Docs Check
-        run: |
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 docs_check.py
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Docs check
+      runner_type: func-tester-aarch64
+      additional_envs: |
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 docs_check.py
   FinishCheck:
     needs:
       - StyleCheck
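Reusable workflows do not see the caller's secrets unless they are forwarded explicitly (or `secrets: inherit` is used), so StyleCheck passes the robot SSH key through the new secret_envs input. A trimmed sketch of only the forwarding part, as used above:

    uses: ./.github/workflows/reusable_test.yml
    with:
      test_name: Style check
      runner_type: style-checker
    secrets:
      secret_envs: |
        ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
        ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
        RCSK

Inside reusable_test.yml the value is appended to $GITHUB_ENV together with additional_envs, so style_check.py sees ROBOT_CLICKHOUSE_SSH_KEY as an ordinary environment variable.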
.github/workflows/jepsen.yml (57 lines changed)
@@ -11,60 +11,19 @@ on:  # yamllint disable-line rule:truthy
   workflow_call:
 jobs:
   KeeperJepsenRelease:
-    runs-on: [self-hosted, style-checker]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/keeper_jepsen
-          REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
-          EOF
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-          fetch-depth: 0
-          filter: tree:0
-      - name: Jepsen Test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 jepsen_check.py keeper
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Jepsen keeper check
+      runner_type: style-checker
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 jepsen_check.py keeper
   # ServerJepsenRelease:
   #   runs-on: [self-hosted, style-checker]
-  #   if: ${{ always() }}
-  #   needs: [KeeperJepsenRelease]
-  #   steps:
-  #     - name: Set envs
-  #       run: |
-  #         cat >> "$GITHUB_ENV" << 'EOF'
-  #         TEMP_PATH=${{runner.temp}}/server_jepsen
-  #         REPO_COPY=${{runner.temp}}/server_jepsen/ClickHouse
-  #         EOF
-  #     - name: Check out repository code
-  #       uses: ClickHouse/checkout@v1
-  #       with:
-  #         clear-repository: true
-  #         fetch-depth: 0
-  #         filter: tree:0
-  #     - name: Jepsen Test
-  #       run: |
-  #         sudo rm -fr "$TEMP_PATH"
-  #         mkdir -p "$TEMP_PATH"
-  #         cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-  #         cd "$REPO_COPY/tests/ci"
-  #         python3 jepsen_check.py server
-  #     - name: Cleanup
-  #       if: always()
-  #       run: |
-  #         docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-  #         docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-  #         sudo rm -fr "$TEMP_PATH"
+  #   uses: ./.github/workflows/reusable_test.yml
+  #   with:
+  #     test_name: Jepsen server check
+  #     runner_type: style-checker
+  #     run_command: |
+  #       cd "$REPO_COPY/tests/ci"
+  #       python3 jepsen_check.py server
.github/workflows/libfuzzer.yml (85 lines changed)
@@ -10,86 +10,17 @@ on:  # yamllint disable-line rule:truthy
   workflow_call:
 jobs:
   BuilderFuzzers:
-    runs-on: [self-hosted, builder]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/build_check
-          IMAGES_PATH=${{runner.temp}}/images_path
-          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-          CACHES_PATH=${{runner.temp}}/../ccaches
-          BUILD_NAME=fuzzers
-          EOF
-      - name: Download changed images
-        # even if artifact does not exist, e.g. on `do not test` label or failed Docker job
-        continue-on-error: true
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ env.IMAGES_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-          submodules: true
-          ref: ${{github.ref}}
-      - name: Build
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
-      - name: Upload build URLs to artifacts
-        if: ${{ success() || failure() }}
-        uses: actions/upload-artifact@v3
-        with:
-          name: ${{ env.BUILD_URLS }}
-          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+    uses: ./.github/workflows/reusable_build.yml
+    with:
+      build_name: fuzzers
   libFuzzerTest:
     needs: [BuilderFuzzers]
-    runs-on: [self-hosted, func-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/libfuzzer
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=libFuzzer tests
-          REPO_COPY=${{runner.temp}}/libfuzzer/ClickHouse
-          KILL_TIMEOUT=10800
-          EOF
-      - name: Download changed images
-        # even if artifact does not exist, e.g. on `do not test` label or failed Docker job
-        continue-on-error: true
-        uses: actions/download-artifact@v3
-        with:
-          name: changed_images
-          path: ${{ env.TEMP_PATH }}
-      - name: Download json reports
-        uses: actions/download-artifact@v3
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Check out repository code
-        uses: ClickHouse/checkout@v1
-        with:
-          clear-repository: true
-      - name: libFuzzer test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
-      - name: Cleanup
-        if: always()
-        run: |
-          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
-          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
-          sudo rm -fr "$TEMP_PATH"
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: libFuzzer tests
+      runner_type: func-tester
+      additional_envs: |
+        KILL_TIMEOUT=10800
+      run_command: |
+        cd "$REPO_COPY/tests/ci"
+        python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
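Build jobs follow the same idea with reusable_build.yml: the caller names only the build configuration and everything else (env setup, checkout, build_check.py invocation, artifact upload, cleanup) lives in the reusable workflow. A minimal sketch, assuming the build name is defined in tests/ci/ci_config.py:

    BuilderFuzzers:
      uses: ./.github/workflows/reusable_build.yml
      with:
        build_name: fuzzers   # looked up in ci_config.py to derive the build envs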
.github/workflows/master.yml (3341 lines changed; diff not shown because it is too large)
.github/workflows/nightly.yml (3 lines changed)
@@ -74,9 +74,6 @@ jobs:
         with:
           name: changed_images
           path: ${{ runner.temp }}/changed_images.json
-  Codebrowser:
-    needs: [DockerHubPush]
-    uses: ./.github/workflows/woboq.yml
   SonarCloud:
     runs-on: [self-hosted, builder]
     env:
.github/workflows/pull_request.yml (4378 lines changed; diff not shown because it is too large)

.github/workflows/release_branches.yml (1486 lines changed; diff not shown because it is too large)
.github/workflows/reusable_build.yml (9 lines changed)
@@ -1,6 +1,10 @@
 ### For the pure soul wishes to move it to another place
 # https://github.com/orgs/community/discussions/9050

+env:
+  # Force the stdout and stderr streams to be unbuffered
+  PYTHONUNBUFFERED: 1
+
 name: Build ClickHouse
 'on':
   workflow_call:
@@ -25,6 +29,8 @@ name: Build ClickHouse
 jobs:
   Build:
     name: Build-${{inputs.build_name}}
+    env:
+      GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}}
     runs-on: [self-hosted, '${{inputs.runner_type}}']
     steps:
       - name: Check out repository code
@@ -37,8 +43,6 @@ jobs:
       - name: Set build envs
         run: |
          cat >> "$GITHUB_ENV" << 'EOF'
-          IMAGES_PATH=${{runner.temp}}/images_path
-          GITHUB_JOB_OVERRIDDEN=Build-${{inputs.build_name}}
           ${{inputs.additional_envs}}
           EOF
           python3 "$GITHUB_WORKSPACE"/tests/ci/ci_config.py --build-name "${{inputs.build_name}}" >> "$GITHUB_ENV"
@@ -71,4 +75,5 @@ jobs:
           name: ${{ env.BUILD_URLS }}
           path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
       - name: Clean
+        if: always()
         uses: ./.github/actions/clean
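Moving GITHUB_JOB_OVERRIDDEN from the $GITHUB_ENV heredoc into a job-level env block matters for ordering: a job-level env entry is visible to every step from the start, including the common_setup composite action that checks for it, while a value appended to $GITHUB_ENV only exists for steps that run after the step that wrote it. A hypothetical minimal job to illustrate the difference (job name and step are placeholders, not part of this PR):

    jobs:
      Example:                                 # illustrative only
        runs-on: [self-hosted, style-checker]
        env:
          GITHUB_JOB_OVERRIDDEN: Example       # already set for the very first step
        steps:
          - run: echo "$GITHUB_JOB_OVERRIDDEN"   # prints "Example"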
.github/workflows/reusable_test.yml (new file, 113 lines)
@@ -0,0 +1,113 @@
### For the pure soul wishes to move it to another place
# https://github.com/orgs/community/discussions/9050

name: Testing workflow
'on':
  workflow_call:
    inputs:
      test_name:
        description: the value of test type from tests/ci/ci_config.py, ends up as $CHECK_NAME ENV
        required: true
        type: string
      runner_type:
        description: the label of runner to use
        required: true
        type: string
      run_command:
        description: the command to launch the check. Usually starts with `cd '$REPO_COPY/tests/ci'`
        required: true
        type: string
      batches:
        description: how many batches for the test will be launched
        default: 1
        type: number
      checkout_depth:
        description: the value of the git shallow checkout
        required: false
        type: number
        default: 1
      submodules:
        description: if the submodules should be checked out
        required: false
        type: boolean
        default: false
      additional_envs:
        description: additional ENV variables to setup the job
        type: string
    secrets:
      secret_envs:
        description: if given, it's passed to the environments
        required: false

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1
  CHECK_NAME: ${{inputs.test_name}}

jobs:
  PrepareStrategy:
    # batches < 1 is misconfiguration,
    # and we need this step only for batches > 1
    if: ${{ inputs.batches > 1 }}
    runs-on: [self-hosted, style-checker-aarch64]
    outputs:
      batches: ${{steps.batches.outputs.batches}}
    steps:
      - name: Calculate batches
        id: batches
        run: |
          batches_output=$(python3 -c 'import json; print(json.dumps(list(range(${{inputs.batches}}))))')
          echo "batches=${batches_output}" >> "$GITHUB_OUTPUT"
  Test:
    # If PrepareStrategy is skipped for batches == 1,
    # we still need to launch the test.
    # `! failure()` is mandatory here to launch on skipped Job
    # `&& !cancelled()` to allow the be cancelable
    if: ${{ ( !failure() && !cancelled() ) && inputs.batches > 0 }}
    # Do not add `-0` to the end, if there's only one batch
    name: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }}
    env:
      GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }}
    runs-on: [self-hosted, '${{inputs.runner_type}}']
    needs: [PrepareStrategy]
    strategy:
      fail-fast: false  # we always wait for entire matrix
      matrix:
        # if PrepareStrategy does not have batches, we use 0
        batch: ${{ needs.PrepareStrategy.outputs.batches
                && fromJson(needs.PrepareStrategy.outputs.batches)
                || fromJson('[0]')}}
    steps:
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true
          submodules: ${{inputs.submodules}}
          fetch-depth: ${{inputs.checkout_depth}}
          filter: tree:0
      - name: Set build envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          ${{inputs.additional_envs}}
          ${{secrets.secret_envs}}
          EOF
      - name: Common setup
        uses: ./.github/actions/common_setup
        with:
          job_type: test
      - name: Download json reports
        uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: Setup batch
        if: ${{ inputs.batches > 1}}
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          RUN_BY_HASH_NUM=${{matrix.batch}}
          RUN_BY_HASH_TOTAL=${{inputs.batches}}
          EOF
      - name: Run test
        run: ${{inputs.run_command}}
      - name: Clean
        if: always()
        uses: ./.github/actions/clean
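To trace the batching end to end: with batches: 4 the PrepareStrategy step prints json.dumps(list(range(4))), i.e. [0, 1, 2, 3], into $GITHUB_OUTPUT, the Test job fans out into four matrix entries named "<test_name>-0" through "<test_name>-3", and each entry exports RUN_BY_HASH_NUM (its batch index) and RUN_BY_HASH_TOTAL (the batch count) before running the check script. A trimmed caller sketch mirroring the IntegrationTestsRelease job shown earlier in this diff:

    IntegrationTestsRelease:
      needs: [BuilderDebRelease]
      uses: ./.github/workflows/reusable_test.yml
      with:
        test_name: Integration tests (release)
        runner_type: stress-tester
        batches: 4                      # the Test matrix becomes [0, 1, 2, 3]
        run_command: |
          cd "$REPO_COPY/tests/ci"
          python3 integration_test_check.py "$CHECK_NAME"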
.github/workflows/woboq.yml (file deleted, 44 lines)
@@ -1,44 +0,0 @@
name: WoboqBuilder
env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: woboq
on:  # yamllint disable-line rule:truthy
  workflow_dispatch:
  workflow_call:
jobs:
  # don't use dockerhub push because this image updates so rarely
  WoboqCodebrowser:
    runs-on: [self-hosted, style-checker]
    timeout-minutes: 420  # the task is pretty heavy, so there's an additional hour
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/codebrowser
          REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse
          IMAGES_PATH=${{runner.temp}}/images_path
          EOF
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
        with:
          clear-repository: true
          submodules: 'true'
      - name: Download json reports
        uses: actions/download-artifact@v3
        with:
          path: ${{ env.IMAGES_PATH }}
      - name: Codebrowser
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py
      - name: Cleanup
        if: always()
        run: |
          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
          sudo rm -fr "$TEMP_PATH"
@@ -4,8 +4,8 @@ FROM node:16-alpine

 RUN apk add --no-cache git openssh bash

-# At this point we want to really update /opt/clickhouse-docs
-# despite the cached images
+# At this point we want to really update /opt/clickhouse-docs directory
+# So we reset the cache
 ARG CACHE_INVALIDATOR=0

 RUN git clone https://github.com/ClickHouse/clickhouse-docs.git \
@@ -37,7 +37,10 @@ from commit_status_helper import (
 from ci_config import CI_CONFIG


+# Old way to read the neads_data
 NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")
+# Now it's set here. Two-steps migration for backward compatibility
+NEEDS_DATA = os.getenv("NEEDS_DATA", "")


 def main():
@@ -58,6 +61,10 @@ def main():
     if os.path.exists(NEEDS_DATA_PATH):
         with open(NEEDS_DATA_PATH, "rb") as file_handler:
             needs_data = json.load(file_handler)
+
+    if NEEDS_DATA:
+        needs_data = json.loads(NEEDS_DATA)
+
     required_builds = len(needs_data)

     if needs_data:
@@ -50,21 +50,27 @@ class CiConfig:

     def validate(self) -> None:
         errors = []
-        # All build configs must belong to build_report_config
-        for build_name in self.build_config.keys():
+        for name, build_config in self.build_config.items():
             build_in_reports = False
             for report_config in self.builds_report_config.values():
-                if build_name in report_config:
+                if name in report_config:
                     build_in_reports = True
                     break
+            # All build configs must belong to build_report_config
             if not build_in_reports:
+                logging.error("Build name %s does not belong to build reports", name)
+                errors.append(f"Build name {name} does not belong to build reports")
+            # The name should be the same as build_config.name
+            if not build_config.name == name:
                 logging.error(
-                    "Build name %s does not belong to build reports", build_name
+                    "Build name '%s' does not match the config 'name' value '%s'",
+                    name,
+                    build_config.name,
                 )
                 errors.append(
-                    f"Build name {build_name} does not belong to build reports"
+                    f"Build name {name} does not match 'name' value '{build_config.name}'"
                 )
-        # And otherwise
+        # All build_report_config values should be in build_config.keys()
         for build_report_name, build_names in self.builds_report_config.items():
             missed_names = [
                 name for name in build_names if name not in self.build_config.keys()
@@ -216,7 +222,7 @@ CI_CONFIG = CiConfig(
         ),
         "fuzzers": BuildConfig(
             name="fuzzers",
-            compiler="clang-16",
+            compiler="clang-17",
             package_type="fuzzers",
         ),
     },
@@ -1,150 +0,0 @@
#!/usr/bin/env python3


import logging
import os
from pathlib import Path

from github import Github

from commit_status_helper import get_commit, post_commit_status
from docker_pull_helper import get_image_with_version, DockerImage
from env_helper import (
    IMAGES_PATH,
    REPO_COPY,
    S3_DOWNLOAD,
    S3_BUILDS_BUCKET,
    S3_TEST_REPORTS_BUCKET,
    TEMP_PATH,
)
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResult
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
from upload_result_helper import upload_results

NAME = "Woboq Build"


def get_run_command(
    repo_path: Path, output_path: Path, image: DockerImage, sha: str
) -> str:
    user = f"{os.geteuid()}:{os.getegid()}"
    cmd = (
        f"docker run --rm --user={user} --volume={repo_path}:/build "
        f"--volume={output_path}:/workdir/output --network=host "
        # use sccache, https://github.com/KDAB/codebrowser/issues/111
        f"-e SCCACHE_BUCKET='{S3_BUILDS_BUCKET}' "
        "-e SCCACHE_S3_KEY_PREFIX=ccache/sccache "
        '-e CMAKE_FLAGS="$CMAKE_FLAGS -DCOMPILER_CACHE=sccache" '
        f"-e 'DATA={S3_DOWNLOAD}/{S3_TEST_REPORTS_BUCKET}/codebrowser/data' "
        f"-e SHA={sha} {image}"
    )
    return cmd


def main():
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    gh = Github(get_best_robot_token(), per_page=100)
    pr_info = PRInfo()
    commit = get_commit(gh, pr_info.sha)
    temp_path = Path(TEMP_PATH)

    if not temp_path.exists():
        os.makedirs(temp_path)

    docker_image = get_image_with_version(IMAGES_PATH, "clickhouse/codebrowser")
    # FIXME: the codebrowser is broken with clang-16, workaround with clang-15
    # See https://github.com/ClickHouse/ClickHouse/issues/50077
    docker_image.version = "49701-4dcdcf4c11b5604f1c5d3121c9c6fea3e957b605"
    s3_helper = S3Helper()

    result_path = temp_path / "result_path"
    if not result_path.exists():
        os.makedirs(result_path)

    run_command = get_run_command(
        Path(REPO_COPY), result_path, docker_image, pr_info.sha[:12]
    )

    logging.info("Going to run codebrowser: %s", run_command)

    run_log_path = result_path / "run.log"

    state = "success"
    with TeePopen(run_command, run_log_path) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")
            state = "failure"

    report_path = result_path / "html_report"
    logging.info("Report path %s", report_path)

    s3_path_prefix = "codebrowser"
    index_template = (
        f'<a href="{S3_DOWNLOAD}/{S3_TEST_REPORTS_BUCKET}/{s3_path_prefix}/index.html">'
        "{}</a>"
    )
    additional_logs = [path.absolute() for path in result_path.glob("*.log")]
    test_results = [
        TestResult(
            index_template.format("Generate codebrowser site"),
            state,
            stopwatch.duration_seconds,
            additional_logs,
        )
    ]

    if state == "success":
        stopwatch.reset()
        _ = s3_helper.fast_parallel_upload_dir(
            report_path, s3_path_prefix, S3_TEST_REPORTS_BUCKET
        )
        test_results.append(
            TestResult(
                index_template.format("Upload codebrowser site"),
                state,
                stopwatch.duration_seconds,
            )
        )

    # Check if the run log contains `FATAL Error:`, that means the code problem
    stopwatch.reset()
    fatal_error = "FATAL Error:"
    logging.info("Search for '%s' in %s", fatal_error, run_log_path)
    with open(run_log_path, "r", encoding="utf-8") as rlfd:
        for line in rlfd.readlines():
            if "FATAL Error:" in line:
                logging.warning(
                    "The line '%s' found, mark the run as failure", fatal_error
                )
                state = "failure"
                test_results.append(
                    TestResult(
                        "Indexing error",
                        state,
                        stopwatch.duration_seconds,
                        additional_logs,
                    )
                )
                break

    report_url = upload_results(
        s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME
    )

    print(f"::notice ::Report url: {report_url}")

    post_commit_status(commit, state, report_url, "Report built", NAME, pr_info)


if __name__ == "__main__":
    main()
@@ -17,7 +17,7 @@ from commit_status_helper import (
     update_mergeable_check,
 )
 from docker_pull_helper import get_image_with_version
-from env_helper import TEMP_PATH, REPO_COPY
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
 from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from report import TestResults, TestResult
@@ -57,6 +57,8 @@ def main():

     temp_path = Path(TEMP_PATH)
     temp_path.mkdir(parents=True, exist_ok=True)
+    reports_path = Path(REPORTS_PATH)
+    reports_path.mkdir(parents=True, exist_ok=True)
     repo_path = Path(REPO_COPY)

     pr_info = PRInfo(need_changed_files=True)
@@ -82,7 +84,7 @@ def main():
     elif args.force:
         logging.info("Check the docs because of force flag")

-    docker_image = get_image_with_version(temp_path, "clickhouse/docs-builder")
+    docker_image = get_image_with_version(reports_path, "clickhouse/docs-builder")

     test_output = temp_path / "docs_check_log"
     test_output.mkdir(parents=True, exist_ok=True)
@@ -24,7 +24,7 @@ GITHUB_WORKSPACE = os.getenv("GITHUB_WORKSPACE", git_root)
 GITHUB_RUN_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/actions/runs/{GITHUB_RUN_ID}"
 IMAGES_PATH = os.getenv("IMAGES_PATH", TEMP_PATH)
 REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports")))
-REPO_COPY = os.getenv("REPO_COPY", git_root)
+REPO_COPY = os.getenv("REPO_COPY", GITHUB_WORKSPACE)
 RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp")))
 S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds")
 S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports")
@@ -24,7 +24,7 @@ from commit_status_helper import (
     format_description,
 )
 from docker_pull_helper import get_image_with_version, DockerImage
-from env_helper import S3_BUILDS_BUCKET, TEMP_PATH, REPO_COPY
+from env_helper import S3_BUILDS_BUCKET, TEMP_PATH, REPO_COPY, REPORTS_PATH
 from get_robot_token import get_best_robot_token
 from pr_info import FORCE_TESTS_LABEL, PRInfo
 from report import TestResult, TestResults, read_test_results
@@ -117,8 +117,9 @@ def main():
     args = parse_args()

     temp_path = Path(TEMP_PATH)
-
     temp_path.mkdir(parents=True, exist_ok=True)
+    reports_path = Path(REPORTS_PATH)
+    reports_path.mkdir(parents=True, exist_ok=True)

     pr_info = PRInfo()

@@ -135,7 +136,7 @@ def main():
         sys.exit(1)
     sys.exit(0)

-    docker_image = get_image_with_version(temp_path, "clickhouse/fasttest")
+    docker_image = get_image_with_version(reports_path, "clickhouse/fasttest")

     s3_helper = S3Helper()
@@ -21,7 +21,7 @@ from commit_status_helper import (
     update_mergeable_check,
 )
 from docker_pull_helper import get_image_with_version
-from env_helper import GITHUB_WORKSPACE, TEMP_PATH
+from env_helper import REPO_COPY, REPORTS_PATH, TEMP_PATH
 from get_robot_token import get_best_robot_token
 from github_helper import GitHub
 from git_helper import git_runner
@@ -139,9 +139,11 @@ def main():

     stopwatch = Stopwatch()

-    repo_path = Path(GITHUB_WORKSPACE)
+    repo_path = Path(REPO_COPY)
     temp_path = Path(TEMP_PATH)
     temp_path.mkdir(parents=True, exist_ok=True)
+    reports_path = Path(REPORTS_PATH)
+    reports_path.mkdir(parents=True, exist_ok=True)

     pr_info = PRInfo()
     if args.push:
@@ -161,7 +163,7 @@ def main():
         code = int(state != "success")
         sys.exit(code)

-    docker_image = get_image_with_version(temp_path, "clickhouse/style-test")
+    docker_image = get_image_with_version(reports_path, "clickhouse/style-test")
     s3_helper = S3Helper()

     cmd = (
tests/ci/test_ci_config.py (new file, 15 lines)
@@ -0,0 +1,15 @@
#!/usr/bin/env python3

import unittest


class TestCiConfig(unittest.TestCase):
    def test_no_errors_in_ci_config(self):
        raised = None
        try:
            from ci_config import (  # pylint: disable=import-outside-toplevel
                CI_CONFIG as _,
            )
        except Exception as exc:
            raised = exc
        self.assertIsNone(raised, f"CI_CONFIG import raised error {raised}")
@@ -2,8 +2,14 @@

 set -e

+WORKING_DIR=$(dirname "$0")
+cd "$WORKING_DIR"
+
 GIT_ROOT=$(git rev-parse --show-cdup)
-GIT_ROOT=${GIT_ROOT:-.}
+GIT_ROOT=${GIT_ROOT:-../../}
 act --list --directory="$GIT_ROOT" 1>/dev/null 2>&1 || act --list --directory="$GIT_ROOT" 2>&1

 actionlint -ignore 'reusable workflow call.+' || :

+
+python3 check_reusable_workflows.py
utils/check-style/check_reusable_workflows.py (new file, 53 lines)
@@ -0,0 +1,53 @@
#!/usr/bin/env python3

from pathlib import Path
from typing import Dict, Iterable, List
import yaml

git_root = Path(__file__).absolute().parents[2]


def check_workflows(paths: Iterable[Path]) -> List[str]:
    outputs = []  # type: List[str]
    for path in paths:
        workflow_object = yaml.safe_load(path.read_bytes())
        workflow_object["file---name"] = path.name
        outputs.extend(check_name_override(workflow_object))

    return outputs


def check_name_override(workflow_object: dict) -> List[str]:
    outputs = []  # type: List[str]
    workflow_file = workflow_object.get("file---name", "")  # type: str
    jobs = workflow_object.get("jobs", {})  # type: Dict[str, dict]
    for name, obj in jobs.items():
        header = f"Workflow '{workflow_file}': Job '{name}': "
        name_overriden = obj.get("name", "")
        env_name_overriden = obj.get("env", {}).get("GITHUB_JOB_OVERRIDDEN", "")
        if name_overriden or env_name_overriden:
            if not (name_overriden and env_name_overriden):
                outputs.append(
                    f"{header}job has one of 'name' and 'env.GITHUB_JOB_OVERRIDDEN', "
                    "but not both"
                )
            elif name_overriden != env_name_overriden:
                outputs.append(
                    f"{header}value of 'name' and 'env.GITHUB_JOB_OVERRIDDEN' are not "
                    f"equal. name={name_overriden}; "
                    f"env.GITHUB_JOB_OVERRIDDEN={env_name_overriden}"
                )
    return outputs


def main() -> None:
    reusable_workflow_paths = git_root.glob(".github/workflows/reusable_*.y*ml")
    outputs = check_workflows(reusable_workflow_paths)
    if outputs:
        print("Found next issues for workflows:")
        for o in outputs:
            print(o)


if __name__ == "__main__":
    main()
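The checker above inspects only the reusable workflows and insists that a job either overrides neither `name` nor `env.GITHUB_JOB_OVERRIDDEN`, or overrides both with the same expression. A minimal job shape that satisfies it, modeled on the Build job in reusable_build.yml from this same diff:

    jobs:
      Build:
        name: Build-${{inputs.build_name}}
        env:
          GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}}   # must equal `name`
        runs-on: [self-hosted, '${{inputs.runner_type}}']
        steps:
          - name: Check out repository code
            uses: ClickHouse/checkout@v1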