mirror of https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-25 00:52:02 +00:00

Merge pull request #65045 from ClickHouse/ci_config_refactoring

CI: ci_config refactoring

This commit is contained in: commit abb88e4d60

.github/PULL_REQUEST_TEMPLATE.md (vendored, 3 changes)
@@ -48,8 +48,7 @@ At a minimum, the following information should be added (but add more as needed)
 - [ ] <!---ci_include_stateful--> Allow: Stateful tests
 - [ ] <!---ci_include_integration--> Allow: Integration Tests
 - [ ] <!---ci_include_performance--> Allow: Performance tests
-- [ ] <!---ci_set_normal_builds--> Allow: Normal Builds
-- [ ] <!---ci_set_special_builds--> Allow: Special Builds
+- [ ] <!---ci_set_builds--> Allow: All Builds
 - [ ] <!---ci_set_non_required--> Allow: All NOT Required Checks
 - [ ] <!---batch_0_1--> Allow: batch 1, 2 for multi-batch jobs
 - [ ] <!---batch_2_3--> Allow: batch 3, 4, 5, 6 for multi-batch jobs
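For context: the CI scripts read the PR body and map each checked box to a customization tag (compare the Tags class added in tests/ci/ci_definitions.py below, e.g. CI_SET_BUILDS = "ci_set_builds"). A minimal sketch of such parsing; the helper name and regex are hypothetical, the real logic lives in the CI scripts:

    import re

    def parse_ci_tags(pr_body: str) -> list:
        # a checked box looks like: "- [x] <!---ci_set_builds--> Allow: All Builds"
        return re.findall(r"- \[x\] <!---(\w+)-->", pr_body)

    parse_ci_tags("- [x] <!---ci_set_builds--> Allow: All Builds")  # -> ['ci_set_builds']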
.github/workflows/backport_branches.yml (vendored, 6 changes)
@@ -70,7 +70,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Compatibility check (amd64)
+      test_name: Compatibility check (release)
       runner_type: style-checker
       data: ${{ needs.RunConfig.outputs.data }}
   CompatibilityCheckAarch64:
@@ -194,7 +194,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Install packages (amd64)
+      test_name: Install packages (release)
       runner_type: style-checker
       data: ${{ needs.RunConfig.outputs.data }}
       run_command: |
@@ -204,7 +204,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Install packages (arm64)
+      test_name: Install packages (aarch64)
       runner_type: style-checker-aarch64
       data: ${{ needs.RunConfig.outputs.data }}
       run_command: |
.github/workflows/master.yml (vendored, 17 changes)
@@ -115,25 +115,16 @@ jobs:
       data: ${{ needs.RunConfig.outputs.data }}

   ################################# Reports #################################
-  # Reports should be run even if Builds_1/2 failed - put them separately in wf (not in Tests_1/2)
-  Builds_1_Report:
+  # Reports should run even if Builds_1/2 fail - run them separately, not in Tests_1/2/3
+  Builds_Report:
     # run report check for failed builds to indicate the CI error
     if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }}
-    needs: [RunConfig, Builds_1]
+    needs: [RunConfig, Builds_1, Builds_2]
     uses: ./.github/workflows/reusable_test.yml
     with:
       test_name: ClickHouse build check
       runner_type: style-checker-aarch64
       data: ${{ needs.RunConfig.outputs.data }}
-  Builds_2_Report:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse special build check') }}
-    needs: [RunConfig, Builds_2]
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse special build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}

   MarkReleaseReady:
     if: ${{ !failure() && !cancelled() }}
@@ -165,7 +156,7 @@ jobs:

   FinishCheck:
     if: ${{ !cancelled() }}
-    needs: [RunConfig, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2, Tests_3]
+    needs: [RunConfig, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2, Tests_3]
     runs-on: [self-hosted, style-checker-aarch64]
     steps:
       - name: Check out repository code
.github/workflows/pull_request.yml (vendored, 21 changes)
@@ -143,29 +143,20 @@ jobs:
       data: ${{ needs.RunConfig.outputs.data }}

   ################################# Reports #################################
-  # Reports should by run even if Builds_1/2 fail, so put them separately in wf (not in Tests_1/2)
-  Builds_1_Report:
+  # Reports should run even if Builds_1/2 fail - run them separately (not in Tests_1/2/3)
+  Builds_Report:
     # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() && needs.StyleCheck.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }}
-    needs: [RunConfig, StyleCheck, Builds_1]
+    if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }}
+    needs: [RunConfig, StyleCheck, Builds_1, Builds_2]
     uses: ./.github/workflows/reusable_test.yml
     with:
       test_name: ClickHouse build check
       runner_type: style-checker-aarch64
       data: ${{ needs.RunConfig.outputs.data }}
-  Builds_2_Report:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() && needs.StyleCheck.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse special build check') }}
-    needs: [RunConfig, StyleCheck, Builds_2]
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse special build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}

   CheckReadyForMerge:
     if: ${{ !cancelled() && needs.StyleCheck.result == 'success' }}
-    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2]
+    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2]
     runs-on: [self-hosted, style-checker-aarch64]
     steps:
       - name: Check out repository code
@@ -181,7 +172,7 @@ jobs:
   #
   FinishCheck:
     if: ${{ !cancelled() }}
-    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2, Tests_3]
+    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2, Tests_3]
     runs-on: [self-hosted, style-checker-aarch64]
     steps:
       - name: Check out repository code
.github/workflows/release_branches.yml (vendored, 6 changes)
@@ -65,7 +65,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Compatibility check (amd64)
+      test_name: Compatibility check (release)
       runner_type: style-checker
       data: ${{ needs.RunConfig.outputs.data }}
   CompatibilityCheckAarch64:
@@ -244,7 +244,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Install packages (amd64)
+      test_name: Install packages (release)
       runner_type: style-checker
       data: ${{ needs.RunConfig.outputs.data }}
       run_command: |
@@ -254,7 +254,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Install packages (arm64)
+      test_name: Install packages (aarch64)
       runner_type: style-checker-aarch64
       data: ${{ needs.RunConfig.outputs.data }}
       run_command: |
@@ -15,7 +15,7 @@ from github.Commit import Commit
 from build_download_helper import download_build_with_progress
 from commit_status_helper import post_commit_status
 from compress_files import SUFFIX, compress_fast, decompress_fast
-from env_helper import CI, RUNNER_TEMP, S3_BUILDS_BUCKET
+from env_helper import IS_CI, RUNNER_TEMP, S3_BUILDS_BUCKET
 from git_helper import SHA_REGEXP
 from report import FOOTER_HTML_TEMPLATE, HEAD_HTML_TEMPLATE, SUCCESS
 from s3_helper import S3Helper
@@ -131,7 +131,7 @@ class ArtifactsHelper:
         post_commit_status(commit, SUCCESS, url, "Artifacts for workflow", "Artifacts")

     def _regenerate_index(self) -> None:
-        if CI:
+        if IS_CI:
             files = self._get_s3_objects()
         else:
             files = self._get_local_s3_objects()
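The rename from CI to IS_CI disambiguates the boolean environment flag from the new CI config class imported from ci_config. A plausible sketch of the flag in env_helper; this is an assumption, not the verbatim definition:

    import os

    # GitHub Actions sets CI=true in job environments; any non-empty value counts
    IS_CI = bool(os.getenv("CI"))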
@@ -6,7 +6,7 @@ import subprocess
 import sys
 from pathlib import Path

-from build_download_helper import get_build_name_for_check, read_build_urls
+from build_download_helper import read_build_urls
 from clickhouse_helper import CiLogsCredentials
 from docker_images_helper import DockerImage, get_docker_image, pull_image
 from env_helper import REPORT_PATH, TEMP_PATH
@@ -14,6 +14,7 @@ from pr_info import PRInfo
 from report import FAIL, FAILURE, OK, SUCCESS, JobReport, TestResult
 from stopwatch import Stopwatch
 from tee_popen import TeePopen
+from ci_config import CI

 IMAGE_NAME = "clickhouse/fuzzer"

@@ -64,7 +65,7 @@ def main():

     docker_image = pull_image(get_docker_image(IMAGE_NAME))

-    build_name = get_build_name_for_check(check_name)
+    build_name = CI.get_required_build_name(check_name)
     urls = read_build_urls(build_name, reports_path)
     if not urls:
         raise ValueError("No build URLs found")
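The free function get_build_name_for_check is replaced by CI.get_required_build_name, which resolves a test job to the build it consumes. Given JobConfig.required_builds and get_required_build() in tests/ci/ci_definitions.py below, the method plausibly reduces to a lookup like this (a sketch, not the verbatim implementation):

    def get_required_build_name(check_name: str) -> str:
        # first (and typically only) build listed in the job's config
        return JOB_CONFIGS[check_name].get_required_build()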
@@ -7,7 +7,7 @@ import sys
 from pathlib import Path
 from typing import List, Sequence, Tuple

-from ci_config import JobNames
+from ci_config import CI
 from ci_utils import normalize_string
 from env_helper import TEMP_PATH
 from functional_test_check import NO_CHANGES_MSG
@@ -92,16 +92,19 @@ def main():
     logging.basicConfig(level=logging.INFO)
     # args = parse_args()
     stopwatch = Stopwatch()
-    jobs_to_validate = [JobNames.STATELESS_TEST_RELEASE, JobNames.INTEGRATION_TEST]
+    jobs_to_validate = [
+        CI.JobNames.STATELESS_TEST_RELEASE,
+        CI.JobNames.INTEGRATION_TEST,
+    ]
     functional_job_report_file = Path(TEMP_PATH) / "functional_test_job_report.json"
     integration_job_report_file = Path(TEMP_PATH) / "integration_test_job_report.json"
     jobs_report_files = {
-        JobNames.STATELESS_TEST_RELEASE: functional_job_report_file,
-        JobNames.INTEGRATION_TEST: integration_job_report_file,
+        CI.JobNames.STATELESS_TEST_RELEASE: functional_job_report_file,
+        CI.JobNames.INTEGRATION_TEST: integration_job_report_file,
     }
     jobs_scripts = {
-        JobNames.STATELESS_TEST_RELEASE: "functional_test_check.py",
-        JobNames.INTEGRATION_TEST: "integration_test_check.py",
+        CI.JobNames.STATELESS_TEST_RELEASE: "functional_test_check.py",
+        CI.JobNames.INTEGRATION_TEST: "integration_test_check.py",
     }

     for test_job in jobs_to_validate:
@@ -9,7 +9,7 @@ from pathlib import Path
 from typing import Tuple

 import docker_images_helper
-from ci_config import CI_CONFIG, BuildConfig
+from ci_config import CI
 from env_helper import REPO_COPY, S3_BUILDS_BUCKET, TEMP_PATH
 from git_helper import Git
 from lambda_shared_package.lambda_shared.pr import Labels
@@ -27,7 +27,7 @@ IMAGE_NAME = "clickhouse/binary-builder"
 BUILD_LOG_NAME = "build_log.log"


-def _can_export_binaries(build_config: BuildConfig) -> bool:
+def _can_export_binaries(build_config: CI.BuildConfig) -> bool:
     if build_config.package_type != "deb":
         return False
     if build_config.sanitizer != "":
@@ -38,7 +38,7 @@ def _can_export_binaries(build_config: BuildConfig) -> bool:


 def get_packager_cmd(
-    build_config: BuildConfig,
+    build_config: CI.BuildConfig,
     packager_path: Path,
     output_path: Path,
     build_version: str,
@@ -147,7 +147,8 @@ def main():
     stopwatch = Stopwatch()
     build_name = args.build_name

-    build_config = CI_CONFIG.build_config[build_name]
+    build_config = CI.JOB_CONFIGS[build_name].build_config
+    assert build_config

     temp_path = Path(TEMP_PATH)
     temp_path.mkdir(parents=True, exist_ok=True)
@@ -10,7 +10,7 @@ from typing import Any, Callable, List, Optional, Union

 import requests

-from ci_config import CI_CONFIG
+from ci_config import CI

 try:
     # A work around for scripts using this downloading module without required deps
@@ -122,10 +122,6 @@ def get_gh_api(
     raise APIException(f"Unable to request data from GH API: {url}") from exc


-def get_build_name_for_check(check_name: str) -> str:
-    return CI_CONFIG.test_configs[check_name].required_build
-
-
 def read_build_urls(build_name: str, reports_path: Union[Path, str]) -> List[str]:
     for root, _, files in os.walk(reports_path):
         for file in files:
@@ -210,7 +206,7 @@ def download_builds_filter(
     result_path: Path,
     filter_fn: Callable[[str], bool] = lambda _: True,
 ) -> None:
-    build_name = get_build_name_for_check(check_name)
+    build_name = CI.get_required_build_name(check_name)
     urls = read_build_urls(build_name, reports_path)
     logger.info("The build report for %s contains the next URLs: %s", build_name, urls)

@@ -247,7 +243,7 @@ def download_clickhouse_binary(
 def get_clickhouse_binary_url(
     check_name: str, reports_path: Union[Path, str]
 ) -> Optional[str]:
-    build_name = get_build_name_for_check(check_name)
+    build_name = CI.get_required_build_name(check_name)
     urls = read_build_urls(build_name, reports_path)
     logger.info("The build report for %s contains the next URLs: %s", build_name, urls)
     for url in urls:
@@ -1,4 +1,5 @@
 #!/usr/bin/env python3
+import argparse
 import json
 import logging
 import os
@@ -6,7 +7,6 @@ import sys
 from pathlib import Path
 from typing import List

-from ci_config import CI_CONFIG, Build
 from env_helper import (
     GITHUB_JOB_URL,
     GITHUB_REPOSITORY,
@@ -14,7 +14,7 @@ from env_helper import (
     REPORT_PATH,
     TEMP_PATH,
     CI_CONFIG_PATH,
-    CI,
+    IS_CI,
 )
 from pr_info import PRInfo
 from report import (
@@ -25,8 +25,10 @@ from report import (
     JobReport,
     create_build_html_report,
     get_worst_status,
+    FAILURE,
 )
 from stopwatch import Stopwatch
+from ci_config import CI

 # Old way to read the neads_data
 NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")
@@ -46,16 +48,13 @@ def main():
         "\n ".join(p.as_posix() for p in reports_path.rglob("*.json")),
     )

-    build_check_name = sys.argv[1]
+    build_check_name = CI.JobNames.BUILD_CHECK

     pr_info = PRInfo()

-    builds_for_check = CI_CONFIG.get_builds_for_report(
-        build_check_name,
-        release=pr_info.is_release,
-        backport=pr_info.head_ref.startswith("backport/"),
-    )
-    if CI:
+    args = parse_args()
+
+    if (CI_CONFIG_PATH or IS_CI) and not args.reports:
         # In CI only specific builds might be manually selected, or some wf does not build all builds.
         # Filtering @builds_for_check to verify only builds that are present in the current CI workflow
         with open(CI_CONFIG_PATH, encoding="utf-8") as jfd:
@@ -64,8 +63,12 @@ def main():
             ci_config["jobs_data"]["jobs_to_skip"]
             + ci_config["jobs_data"]["jobs_to_do"]
         )
-        builds_for_check = [job for job in builds_for_check if job in all_ci_jobs]
-        print(f"NOTE: following build reports will be accounted: [{builds_for_check}]")
+        builds_for_check = [job for job in CI.BuildNames if job in all_ci_jobs]
+        print(f"NOTE: following build reports will be checked: [{builds_for_check}]")
+    else:
+        builds_for_check = parse_args().reports
+        for job in builds_for_check:
+            assert job in CI.BuildNames, "Builds must be known build job names"

     required_builds = len(builds_for_check)
     missing_builds = 0
@@ -77,8 +80,8 @@ def main():
             build_name, pr_info.number, pr_info.head_ref
         )
         if not build_result:
-            if build_name == Build.FUZZERS:
-                logging.info("Build [%s] is missing - skip", Build.FUZZERS)
+            if build_name == CI.BuildNames.FUZZERS:
+                logging.info("Build [%s] is missing - skip", CI.BuildNames.FUZZERS)
                 continue
             logging.warning("Build results for %s is missing", build_name)
             build_result = BuildResult.missing_result("missing")
@@ -132,17 +135,16 @@ def main():
     # Check if there are no builds at all, do not override bad status
     if summary_status == SUCCESS:
         if missing_builds:
-            summary_status = PENDING
+            summary_status = FAILURE
         elif ok_groups == 0:
             summary_status = ERROR

-    addition = ""
-    if missing_builds:
-        addition = (
-            f" ({required_builds - missing_builds} of {required_builds} builds are OK)"
-        )
-
-    description = f"{ok_groups}/{total_groups} artifact groups are OK{addition}"
+    description = ""
+    if missing_builds:
+        description = f"{missing_builds} of {required_builds} builds are missing."
+
+    description += f" {ok_groups}/{total_groups} artifact groups are OK"

     JobReport(
         description=description,
@@ -158,5 +160,16 @@ def main():
         sys.exit(1)


+def parse_args():
+    parser = argparse.ArgumentParser("Generates overall build report")
+
+    parser.add_argument(
+        "--reports",
+        nargs="+",
+        help="List of build reports to check",
+    )
+    return parser.parse_args()
+
+
 if __name__ == "__main__":
     main()
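With parse_args in place, the report job no longer takes its input from sys.argv[1]; in CI the builds to check are discovered from the workflow config at CI_CONFIG_PATH, while locally the reports can be named explicitly. An illustrative invocation (build names here are examples):

    python3 build_report_check.py --reports package_release package_asan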
@@ -13,14 +13,7 @@ from typing import Any, Dict, List, Optional
 import docker_images_helper
 import upload_result_helper
 from build_check import get_release_or_pr
-from ci_config import (
-    CI_CONFIG,
-    Build,
-    CILabels,
-    CIStages,
-    JobNames,
-    StatusNames,
-)
+from ci_config import CI
 from ci_metadata import CiMetadata
 from ci_utils import GHActions, normalize_string
 from clickhouse_helper import (
@@ -38,10 +31,11 @@ from commit_status_helper import (
     get_commit,
     post_commit_status,
     set_status_comment,
+    get_commit_filtered_statuses,
 )
 from digest_helper import DockerDigester
 from env_helper import (
-    CI,
+    IS_CI,
     GITHUB_JOB_API_URL,
     GITHUB_REPOSITORY,
     GITHUB_RUN_ID,
@@ -295,7 +289,7 @@ def _mark_success_action(
     batch: int,
 ) -> None:
     ci_cache = CiCache(s3, indata["jobs_data"]["digests"])
-    job_config = CI_CONFIG.get_job_config(job)
+    job_config = CI.get_job_config(job)
     num_batches = job_config.num_batches
     # if batch is not provided - set to 0
     batch = 0 if batch == -1 else batch
@@ -305,7 +299,7 @@ def _mark_success_action(

     # FIXME: find generic design for propagating and handling job status (e.g. stop using statuses in GH api)
     # now job ca be build job w/o status data, any other job that exit with 0 with or w/o status data
-    if CI_CONFIG.is_build_job(job):
+    if CI.is_build_job(job):
         # there is no CommitStatus for build jobs
         # create dummy status relying on JobReport
         # FIXME: consider creating commit status for build jobs too, to treat everything the same way
@@ -425,6 +419,7 @@ def _configure_jobs(
     pr_info: PRInfo,
     ci_settings: CiSettings,
     skip_jobs: bool,
+    dry_run: bool = False,
 ) -> CiCache:
     """
     returns CICache instance with configured job's data
@@ -436,10 +431,11 @@ def _configure_jobs(

     # get all jobs
     if not skip_jobs:
-        job_configs = CI_CONFIG.get_workflow_jobs_with_configs(
+        job_configs = CI.get_workflow_jobs_with_configs(
             is_mq=pr_info.is_merge_queue,
             is_docs_only=pr_info.has_changes_in_documentation_only(),
             is_master=pr_info.is_master,
+            is_pr=pr_info.is_pr,
         )
     else:
         job_configs = {}
@@ -457,7 +453,8 @@ def _configure_jobs(
     ci_cache = CiCache.calc_digests_and_create(
         s3,
         job_configs,
-        cache_enabled=not ci_settings.no_ci_cache and not skip_jobs and CI,
+        cache_enabled=not ci_settings.no_ci_cache and not skip_jobs and IS_CI,
+        dry_run=dry_run,
     )
     ci_cache.update()
     ci_cache.apply(job_configs, is_release=pr_info.is_release)
@@ -475,14 +472,14 @@ def _generate_ci_stage_config(jobs_data: Dict[str, Any]) -> Dict[str, Dict[str,
     result = {}  # type: Dict[str, Any]
     stages_to_do = []
     for job in jobs_data:
-        stage_type = CI_CONFIG.get_job_ci_stage(job)
-        if stage_type == CIStages.NA:
+        stage_type = CI.get_job_ci_stage(job)
+        if stage_type == CI.WorkflowStages.NA:
             continue
         if stage_type not in result:
             result[stage_type] = []
             stages_to_do.append(stage_type)
         result[stage_type].append(
-            {"job_name": job, "runner_type": CI_CONFIG.get_runner_type(job)}
+            {"job_name": job, "runner_type": CI.JOB_CONFIGS[job].runner_type}
         )
     result["stages_to_do"] = stages_to_do
     return result
@@ -529,10 +526,10 @@ def _update_gh_statuses_action(indata: Dict, s3: S3Helper) -> None:
         if job not in jobs_to_skip and job not in jobs_to_do:
             # no need to create status for job that are not supposed to be executed
             continue
-        if CI_CONFIG.is_build_job(job):
+        if CI.is_build_job(job):
             # no GH status for build jobs
             continue
-        job_config = CI_CONFIG.get_job_config(job)
+        job_config = CI.get_job_config(job)
         if not job_config:
             # there might be a new job that does not exist on this branch - skip it
             continue
@@ -558,7 +555,7 @@ def _fetch_commit_tokens(message: str, pr_info: PRInfo) -> List[str]:
     res = [
         match
         for match in matches
-        if match in CILabels or match.startswith("job_") or match.startswith("batch_")
+        if match in CI.Tags or match.startswith("job_") or match.startswith("batch_")
     ]
     print(f"CI modifiers from commit message: [{res}]")
     res_2 = []
@@ -567,7 +564,7 @@ def _fetch_commit_tokens(message: str, pr_info: PRInfo) -> List[str]:
     res_2 = [
         match
         for match in matches
-        if match in CILabels
+        if match in CI.Tags
         or match.startswith("job_")
         or match.startswith("batch_")
     ]
@@ -643,7 +640,7 @@ def _upload_build_artifacts(
     print(f"Report file has been uploaded to [{report_url}]")

     # Upload master head's binaries
-    static_bin_name = CI_CONFIG.build_config[build_name].static_binary_name
+    static_bin_name = CI.get_build_config(build_name).static_binary_name
     if pr_info.is_master and static_bin_name:
         # Full binary with debug info:
         s3_path_full = "/".join((pr_info.base_ref, static_bin_name, "clickhouse-full"))
@@ -838,15 +835,15 @@ def _add_build_to_version_history(

 def _run_test(job_name: str, run_command: str) -> int:
     assert (
-        run_command or CI_CONFIG.get_job_config(job_name).run_command
+        run_command or CI.get_job_config(job_name).run_command
     ), "Run command must be provided as input argument or be configured in job config"

     env = os.environ.copy()
-    timeout = CI_CONFIG.get_job_config(job_name).timeout or None
+    timeout = CI.get_job_config(job_name).timeout or None

     if not run_command:
         run_command = "/".join(
-            (os.path.dirname(__file__), CI_CONFIG.get_job_config(job_name).run_command)
+            (os.path.dirname(__file__), CI.get_job_config(job_name).run_command)
         )
     if ".py" in run_command and not run_command.startswith("python"):
         run_command = "python3 " + run_command
@@ -913,13 +910,23 @@ def _cancel_pr_wf(s3: S3Helper, pr_number: int, cancel_sync: bool = False) -> None:
 def _set_pending_statuses(pr_info: PRInfo) -> None:
     commit = get_commit(GitHub(get_best_robot_token(), per_page=100), pr_info.sha)
     try:
-        print("Set SYNC status to pending")
-        commit.create_status(
-            state=PENDING,
-            target_url="",
-            description="",
-            context=StatusNames.SYNC,
-        )
+        found = False
+        statuses = get_commit_filtered_statuses(commit)
+        for commit_status in statuses:
+            if commit_status.context == CI.StatusNames.SYNC:
+                print(
+                    f"Sync status found [{commit_status.state}], [{commit_status.description}] - won't be overwritten"
+                )
+                found = True
+                break
+        if not found:
+            print("Set Sync status to pending")
+            commit.create_status(
+                state=PENDING,
+                target_url="",
+                description=CI.SyncState.PENDING,
+                context=CI.StatusNames.SYNC,
+            )
     except Exception as ex:
         print(f"ERROR: failed to set GH commit status, ex: {ex}")

@@ -952,7 +959,7 @@ def main() -> int:

     ### CONFIGURE action: start
     if args.configure:
-        if CI and pr_info.is_pr:
+        if IS_CI and pr_info.is_pr:
             # store meta on s3 (now we need it only for PRs)
             meta = CiMetadata(s3, pr_info.number, pr_info.head_ref)
             meta.run_id = int(GITHUB_RUN_ID)
@@ -962,7 +969,7 @@ def main() -> int:
             args.commit_message or None, update_from_api=True
         )

-        if ci_settings.no_merge_commit and CI:
+        if ci_settings.no_merge_commit and IS_CI:
             git_runner.run(f"{GIT_PREFIX} checkout {pr_info.sha}")

         git_ref = git_runner.run(f"{GIT_PREFIX} rev-parse HEAD")
@@ -985,18 +992,19 @@ def main() -> int:
         )
         ci_cache.print_status()

-        if CI and not pr_info.is_merge_queue:
+        if IS_CI and not pr_info.is_merge_queue:
             # wait for pending jobs to be finished, await_jobs is a long blocking call
             ci_cache.await_pending_jobs(pr_info.is_release)

             if pr_info.is_release:
                 print("Release/master: CI Cache add pending records for all todo jobs")
                 ci_cache.push_pending_all(pr_info.is_release)

         # conclude results
         result["git_ref"] = git_ref
         result["version"] = version
-        result["build"] = ci_cache.job_digests[Build.PACKAGE_RELEASE]
-        result["docs"] = ci_cache.job_digests[JobNames.DOCS_CHECK]
+        result["build"] = ci_cache.job_digests[CI.BuildNames.PACKAGE_RELEASE]
+        result["docs"] = ci_cache.job_digests[CI.JobNames.DOCS_CHECK]
         result["ci_settings"] = ci_settings.as_dict()
         if not args.skip_jobs:
             result["stages_data"] = _generate_ci_stage_config(ci_cache.jobs_to_do)
@@ -1027,7 +1035,7 @@ def main() -> int:
             f"Check if rerun for name: [{check_name}], extended name [{check_name_with_group}]"
         )
         previous_status = None
-        if CI_CONFIG.is_build_job(check_name):
+        if CI.is_build_job(check_name):
             # this is a build job - check if a build report is present
             build_result = (
                 BuildResult.load_any(check_name, pr_info.number, pr_info.head_ref)
@@ -1055,10 +1063,8 @@ def main() -> int:
             # rerun helper check
             # FIXME: remove rerun_helper check and rely on ci cache only
             if check_name not in (
-                # we might want to rerun reports' jobs - disable rerun check for them
-                JobNames.BUILD_CHECK,
-                JobNames.BUILD_CHECK_SPECIAL,
-            ):
+                CI.JobNames.BUILD_CHECK,
+            ):  # we might want to rerun build report job
                 rerun_helper = RerunHelper(commit, check_name_with_group)
                 if rerun_helper.is_already_finished_by_status():
                     status = rerun_helper.get_finished_status()
@@ -1071,7 +1077,7 @@ def main() -> int:
         # ci cache check
         if not previous_status and not ci_settings.no_ci_cache:
             ci_cache = CiCache(s3, indata["jobs_data"]["digests"]).update()
-            job_config = CI_CONFIG.get_job_config(check_name)
+            job_config = CI.get_job_config(check_name)
             if ci_cache.is_successful(
                 check_name,
                 args.batch,
@@ -1111,7 +1117,7 @@ def main() -> int:
         ch_helper = ClickHouseHelper()
         check_url = ""

-        if CI_CONFIG.is_build_job(args.job_name):
+        if CI.is_build_job(args.job_name):
             assert (
                 indata
             ), f"--infile with config must be provided for POST action of a build type job [{args.job_name}]"
@@ -1119,8 +1125,7 @@ def main() -> int:
             # upload binaries only for normal builds in PRs
             upload_binary = (
                 not pr_info.is_pr
-                or args.job_name
-                not in CI_CONFIG.get_builds_for_report(JobNames.BUILD_CHECK_SPECIAL)
+                or CI.get_job_ci_stage(args.job_name) == CI.WorkflowStages.BUILDS_1
                 or CiSettings.create_from_run_config(indata).upload_all
             )
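Several call sites above switch from CI_CONFIG.is_build_job to CI.is_build_job. Because BuildNames is iterable through the WithIter metaclass, a membership test suffices; a plausible sketch (an assumption, the real method lives in the CI config module whose large diff is suppressed below):

    def is_build_job(job: str) -> bool:
        # BuildNames yields its job-name strings, so `in` works on the class itself
        return job in BuildNames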
@@ -5,7 +5,8 @@ from enum import Enum
 from pathlib import Path
 from typing import Dict, Optional, Any, Union, Sequence, List, Set

-from ci_config import JobNames, Build, CI_CONFIG, JobConfig
+from ci_config import CI
+
 from ci_utils import is_hex, GHActions
 from commit_status_helper import CommitStatusData
 from env_helper import (
@@ -41,7 +42,7 @@ class CiCache:
     release - for jobs being executed on the release branch including master branch (not a PR branch)
     """

-    _REQUIRED_DIGESTS = [JobNames.DOCS_CHECK, Build.PACKAGE_RELEASE]
+    _REQUIRED_DIGESTS = [CI.JobNames.DOCS_CHECK, CI.BuildNames.PACKAGE_RELEASE]
     _S3_CACHE_PREFIX = "CI_cache_v1"
     _CACHE_BUILD_REPORT_PREFIX = "build_report"
     _RECORD_FILE_EXTENSION = ".ci"
@@ -80,7 +81,7 @@ class CiCache:

     @classmethod
     def is_docs_job(cls, job_name: str) -> bool:
-        return job_name == JobNames.DOCS_CHECK
+        return job_name == CI.JobNames.DOCS_CHECK

     @classmethod
     def is_srcs_job(cls, job_name: str) -> bool:
@@ -105,8 +106,8 @@ class CiCache:
     ):
         self.enabled = cache_enabled
         self.jobs_to_skip = []  # type: List[str]
-        self.jobs_to_wait = {}  # type: Dict[str, JobConfig]
-        self.jobs_to_do = {}  # type: Dict[str, JobConfig]
+        self.jobs_to_wait = {}  # type: Dict[str, CI.JobConfig]
+        self.jobs_to_do = {}  # type: Dict[str, CI.JobConfig]
         self.s3 = s3
         self.job_digests = job_digests
         self.cache_s3_paths = {
@@ -127,9 +128,13 @@ class CiCache:

     @classmethod
     def calc_digests_and_create(
-        cls, s3: S3Helper, job_configs: Dict[str, JobConfig], cache_enabled: bool = True
+        cls,
+        s3: S3Helper,
+        job_configs: Dict[str, CI.JobConfig],
+        cache_enabled: bool = True,
+        dry_run: bool = False,
     ) -> "CiCache":
-        job_digester = JobDigester()
+        job_digester = JobDigester(dry_run=dry_run)
         digests = {}

         print("::group::Job Digests")
@@ -140,9 +145,7 @@ class CiCache:

         for job in cls._REQUIRED_DIGESTS:
             if job not in job_configs:
-                digest = job_digester.get_job_digest(
-                    CI_CONFIG.get_job_config(job).digest
-                )
+                digest = job_digester.get_job_digest(CI.get_job_config(job).digest)
                 digests[job] = digest
                 print(
                     f"    job [{job.rjust(50)}] required for CI Cache has digest [{digest}]"
@@ -154,10 +157,10 @@ class CiCache:
         self, job_digests: Dict[str, str], job_type: JobType
     ) -> str:
         if job_type == self.JobType.DOCS:
-            res = job_digests[JobNames.DOCS_CHECK]
+            res = job_digests[CI.JobNames.DOCS_CHECK]
         elif job_type == self.JobType.SRCS:
-            if Build.PACKAGE_RELEASE in job_digests:
-                res = job_digests[Build.PACKAGE_RELEASE]
+            if CI.BuildNames.PACKAGE_RELEASE in job_digests:
+                res = job_digests[CI.BuildNames.PACKAGE_RELEASE]
             else:
                 assert False, "BUG, no build job in digest' list"
         else:
@@ -648,7 +651,7 @@ class CiCache:
         report_path = Path(REPORT_PATH)
         report_path.mkdir(exist_ok=True, parents=True)
         path = (
-            self._get_record_s3_path(Build.PACKAGE_RELEASE)
+            self._get_record_s3_path(CI.BuildNames.PACKAGE_RELEASE)
             + self._CACHE_BUILD_REPORT_PREFIX
         )
         if file_prefix:
@@ -664,13 +667,14 @@ class CiCache:
     def upload_build_report(self, build_result: BuildResult) -> str:
         result_json_path = build_result.write_json(Path(TEMP_PATH))
         s3_path = (
-            self._get_record_s3_path(Build.PACKAGE_RELEASE) + result_json_path.name
+            self._get_record_s3_path(CI.BuildNames.PACKAGE_RELEASE)
+            + result_json_path.name
         )
         return self.s3.upload_file(
             bucket=S3_BUILDS_BUCKET, file_path=result_json_path, s3_path=s3_path
         )

-    def await_pending_jobs(self, is_release: bool) -> None:
+    def await_pending_jobs(self, is_release: bool, dry_run: bool = False) -> None:
         """
         await pending jobs to be finished
         @jobs_with_params - jobs to await. {JOB_NAME: {"batches": [BATCHES...], "num_batches": NUM_BATCHES}}
@@ -687,15 +691,9 @@ class CiCache:
         MAX_JOB_NUM_TO_WAIT = 3
         round_cnt = 0

-        # FIXME: temporary experiment: lets enable await for PR' workflows awaiting on build' jobs only
+        # FIXME: temporary experiment: lets enable await for PR' workflows but for a shorter time
         if not is_release:
-            MAX_ROUNDS_TO_WAIT = 1
-            remove_from_wait = []
-            for job in self.jobs_to_wait:
-                if job not in Build:
-                    remove_from_wait.append(job)
-            for job in remove_from_wait:
-                del self.jobs_to_wait[job]
+            MAX_ROUNDS_TO_WAIT = 3

         while (
             len(self.jobs_to_wait) > MAX_JOB_NUM_TO_WAIT
@@ -713,11 +711,12 @@ class CiCache:
         start_at = int(time.time())
         while expired_sec < TIMEOUT and self.jobs_to_wait:
             await_finished: Set[str] = set()
-            time.sleep(poll_interval_sec)
+            if not dry_run:
+                time.sleep(poll_interval_sec)
             self.update()
             for job_name, job_config in self.jobs_to_wait.items():
                 num_batches = job_config.num_batches
-                job_config = CI_CONFIG.get_job_config(job_name)
+                job_config = CI.get_job_config(job_name)
                 assert job_config.pending_batches
                 assert job_config.batches
                 pending_batches = list(job_config.pending_batches)
@@ -741,12 +740,11 @@ class CiCache:
                             f"Job [{job_name}_[{batch}/{num_batches}]] is not pending anymore"
                         )
                         job_config.batches.remove(batch)
-                        job_config.pending_batches.remove(batch)
                     else:
                         print(
                             f"NOTE: Job [{job_name}:{batch}] finished failed - do not add to ready"
                         )
-                        job_config.pending_batches.remove(batch)
+                    job_config.pending_batches.remove(batch)

                 if not job_config.pending_batches:
                     await_finished.add(job_name)
@@ -754,18 +752,25 @@ class CiCache:
             for job in await_finished:
                 self.jobs_to_skip.append(job)
                 del self.jobs_to_wait[job]
+                del self.jobs_to_do[job]

-            expired_sec = int(time.time()) - start_at
-            print(
-                f"...awaiting continues... seconds left [{TIMEOUT - expired_sec}]"
-            )
+            if not dry_run:
+                expired_sec = int(time.time()) - start_at
+                print(
+                    f"...awaiting continues... seconds left [{TIMEOUT - expired_sec}]"
+                )
+            else:
+                # make up for 2 iterations in dry_run
+                expired_sec += int(TIMEOUT / 2) + 1

         GHActions.print_in_group(
             "Remaining jobs:",
             [list(self.jobs_to_wait)],
         )

-    def apply(self, job_configs: Dict[str, JobConfig], is_release: bool) -> "CiCache":
+    def apply(
+        self, job_configs: Dict[str, CI.JobConfig], is_release: bool
+    ) -> "CiCache":
         if not self.enabled:
             self.jobs_to_do = job_configs
             return self
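The new dry_run plumbing lets the cache and await logic run without real sleeps or S3 timing. A test-style sketch using only the signatures visible in this diff (s3 and job_configs are assumed to be prepared by the caller):

    ci_cache = CiCache.calc_digests_and_create(
        s3, job_configs, cache_enabled=True, dry_run=True
    )
    ci_cache.await_pending_jobs(is_release=False, dry_run=True)  # skips time.sleep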
File diff suppressed because it is too large
tests/ci/ci_definitions.py (new file, 781 lines)

@@ -0,0 +1,781 @@
import copy
from dataclasses import dataclass, field
from pathlib import Path
from typing import Callable, List, Union, Iterable, Optional, Literal, Any

from ci_utils import WithIter
from integration_test_images import IMAGES


class WorkflowStages(metaclass=WithIter):
    """
    Stages of GitHub actions workflow
    """

    # for jobs that do not belong to any stage, e.g. Build Report Check
    NA = "UNKNOWN"
    # normal builds (builds that are required for further testing)
    BUILDS_1 = "Builds_1"
    # special builds
    BUILDS_2 = "Builds_2"
    # all tests required for merge
    TESTS_1 = "Tests_1"
    # not used atm
    TESTS_2 = "Tests_2"
    # all tests not required for merge
    TESTS_3 = "Tests_3"


class Runners(metaclass=WithIter):
    """
    GitHub runner's labels
    """

    BUILDER = "builder"
    STYLE_CHECKER = "style-checker"
    STYLE_CHECKER_ARM = "style-checker-aarch64"
    FUNC_TESTER = "func-tester"
    FUNC_TESTER_ARM = "func-tester-aarch64"
    STRESS_TESTER = "stress-tester"
    FUZZER_UNIT_TESTER = "fuzzer-unit-tester"


class Tags(metaclass=WithIter):
    """
    CI Customization tags (set via PR body or some of them in GH labels, e.g. libFuzzer)
    """

    DO_NOT_TEST_LABEL = "do_not_test"
    NO_MERGE_COMMIT = "no_merge_commit"
    NO_CI_CACHE = "no_ci_cache"
    # to upload all binaries from build jobs
    UPLOAD_ALL_ARTIFACTS = "upload_all"
    CI_SET_SYNC = "ci_set_sync"
    CI_SET_ARM = "ci_set_arm"
    CI_SET_REQUIRED = "ci_set_required"
    CI_SET_BUILDS = "ci_set_builds"
    CI_SET_NON_REQUIRED = "ci_set_non_required"
    CI_SET_OLD_ANALYZER = "ci_set_old_analyzer"

    libFuzzer = "libFuzzer"
class BuildNames(metaclass=WithIter):
    """
    Build' job names
    """

    PACKAGE_RELEASE = "package_release"
    PACKAGE_AARCH64 = "package_aarch64"
    PACKAGE_ASAN = "package_asan"
    PACKAGE_UBSAN = "package_ubsan"
    PACKAGE_TSAN = "package_tsan"
    PACKAGE_MSAN = "package_msan"
    PACKAGE_DEBUG = "package_debug"
    PACKAGE_RELEASE_COVERAGE = "package_release_coverage"
    BINARY_RELEASE = "binary_release"
    BINARY_TIDY = "binary_tidy"
    BINARY_DARWIN = "binary_darwin"
    BINARY_AARCH64 = "binary_aarch64"
    BINARY_AARCH64_V80COMPAT = "binary_aarch64_v80compat"
    BINARY_FREEBSD = "binary_freebsd"
    BINARY_DARWIN_AARCH64 = "binary_darwin_aarch64"
    BINARY_PPC64LE = "binary_ppc64le"
    BINARY_AMD64_COMPAT = "binary_amd64_compat"
    BINARY_AMD64_MUSL = "binary_amd64_musl"
    BINARY_RISCV64 = "binary_riscv64"
    BINARY_S390X = "binary_s390x"
    BINARY_LOONGARCH64 = "binary_loongarch64"
    FUZZERS = "fuzzers"


class JobNames(metaclass=WithIter):
    """
    All CI non-build jobs (Build jobs are concatenated to this list via python hack)
    """

    STYLE_CHECK = "Style check"
    FAST_TEST = "Fast test"
    DOCKER_SERVER = "Docker server image"
    DOCKER_KEEPER = "Docker keeper image"
    INSTALL_TEST_AMD = "Install packages (release)"
    INSTALL_TEST_ARM = "Install packages (aarch64)"

    STATELESS_TEST_DEBUG = "Stateless tests (debug)"
    STATELESS_TEST_RELEASE = "Stateless tests (release)"
    STATELESS_TEST_RELEASE_COVERAGE = "Stateless tests (coverage)"
    STATELESS_TEST_AARCH64 = "Stateless tests (aarch64)"
    STATELESS_TEST_ASAN = "Stateless tests (asan)"
    STATELESS_TEST_TSAN = "Stateless tests (tsan)"
    STATELESS_TEST_MSAN = "Stateless tests (msan)"
    STATELESS_TEST_UBSAN = "Stateless tests (ubsan)"
    STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE = (
        "Stateless tests (release, old analyzer, s3, DatabaseReplicated)"
    )
    STATELESS_TEST_S3_DEBUG = "Stateless tests (debug, s3 storage)"
    STATELESS_TEST_S3_TSAN = "Stateless tests (tsan, s3 storage)"
    STATELESS_TEST_AZURE_ASAN = "Stateless tests (azure, asan)"
    STATELESS_TEST_FLAKY_ASAN = "Stateless tests flaky check (asan)"

    STATEFUL_TEST_DEBUG = "Stateful tests (debug)"
    STATEFUL_TEST_RELEASE = "Stateful tests (release)"
    STATEFUL_TEST_RELEASE_COVERAGE = "Stateful tests (coverage)"
    STATEFUL_TEST_AARCH64 = "Stateful tests (aarch64)"
    STATEFUL_TEST_ASAN = "Stateful tests (asan)"
    STATEFUL_TEST_TSAN = "Stateful tests (tsan)"
    STATEFUL_TEST_MSAN = "Stateful tests (msan)"
    STATEFUL_TEST_UBSAN = "Stateful tests (ubsan)"
    STATEFUL_TEST_PARALLEL_REPL_RELEASE = "Stateful tests (release, ParallelReplicas)"
    STATEFUL_TEST_PARALLEL_REPL_DEBUG = "Stateful tests (debug, ParallelReplicas)"
    STATEFUL_TEST_PARALLEL_REPL_ASAN = "Stateful tests (asan, ParallelReplicas)"
    STATEFUL_TEST_PARALLEL_REPL_MSAN = "Stateful tests (msan, ParallelReplicas)"
    STATEFUL_TEST_PARALLEL_REPL_UBSAN = "Stateful tests (ubsan, ParallelReplicas)"
    STATEFUL_TEST_PARALLEL_REPL_TSAN = "Stateful tests (tsan, ParallelReplicas)"

    STRESS_TEST_ASAN = "Stress test (asan)"
    STRESS_TEST_TSAN = "Stress test (tsan)"
    STRESS_TEST_UBSAN = "Stress test (ubsan)"
    STRESS_TEST_MSAN = "Stress test (msan)"
    STRESS_TEST_DEBUG = "Stress test (debug)"
    STRESS_TEST_AZURE_TSAN = "Stress test (azure, tsan)"
    STRESS_TEST_AZURE_MSAN = "Stress test (azure, msan)"

    INTEGRATION_TEST = "Integration tests (release)"
    INTEGRATION_TEST_ASAN = "Integration tests (asan)"
    INTEGRATION_TEST_ASAN_OLD_ANALYZER = "Integration tests (asan, old analyzer)"
    INTEGRATION_TEST_TSAN = "Integration tests (tsan)"
    INTEGRATION_TEST_ARM = "Integration tests (aarch64)"
    INTEGRATION_TEST_FLAKY = "Integration tests flaky check (asan)"

    UPGRADE_TEST_DEBUG = "Upgrade check (debug)"
    UPGRADE_TEST_ASAN = "Upgrade check (asan)"
    UPGRADE_TEST_TSAN = "Upgrade check (tsan)"
    UPGRADE_TEST_MSAN = "Upgrade check (msan)"

    UNIT_TEST = "Unit tests (release)"
    UNIT_TEST_ASAN = "Unit tests (asan)"
    UNIT_TEST_MSAN = "Unit tests (msan)"
    UNIT_TEST_TSAN = "Unit tests (tsan)"
    UNIT_TEST_UBSAN = "Unit tests (ubsan)"

    AST_FUZZER_TEST_DEBUG = "AST fuzzer (debug)"
    AST_FUZZER_TEST_ASAN = "AST fuzzer (asan)"
    AST_FUZZER_TEST_MSAN = "AST fuzzer (msan)"
    AST_FUZZER_TEST_TSAN = "AST fuzzer (tsan)"
    AST_FUZZER_TEST_UBSAN = "AST fuzzer (ubsan)"

    JEPSEN_KEEPER = "ClickHouse Keeper Jepsen"
    JEPSEN_SERVER = "ClickHouse Server Jepsen"

    PERFORMANCE_TEST_AMD64 = "Performance Comparison (release)"
    PERFORMANCE_TEST_ARM64 = "Performance Comparison (aarch64)"

    SQL_LOGIC_TEST = "Sqllogic test (release)"

    SQLANCER = "SQLancer (release)"
    SQLANCER_DEBUG = "SQLancer (debug)"
    SQLTEST = "SQLTest"

    COMPATIBILITY_TEST = "Compatibility check (release)"
    COMPATIBILITY_TEST_ARM = "Compatibility check (aarch64)"

    CLICKBENCH_TEST = "ClickBench (release)"
    CLICKBENCH_TEST_ARM = "ClickBench (aarch64)"

    LIBFUZZER_TEST = "libFuzzer tests"

    BUILD_CHECK = "ClickHouse build check"
    # BUILD_CHECK_SPECIAL = "ClickHouse special build check"

    DOCS_CHECK = "Docs check"
    BUGFIX_VALIDATE = "Bugfix validation"


# hack to concatenate Build and non-build jobs under JobNames class
for attr_name in dir(BuildNames):
    if not attr_name.startswith("__") and not callable(getattr(BuildNames, attr_name)):
        setattr(JobNames, attr_name, getattr(BuildNames, attr_name))
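# For reference, a minimal sketch of what the WithIter metaclass imported from
# ci_utils could look like -- an assumption, the real implementation may differ.
# It is what makes `for job in BuildNames` and `job in JobNames` work on classes:
#
#     class WithIter(type):
#         def __iter__(cls):
#             # yield public class-attribute values, i.e. the name strings
#             return (v for k, v in cls.__dict__.items() if not k.startswith("_"))
#
# Membership tests such as `"package_release" in BuildNames` then fall back to
# iteration, which is also how the `match in CI.Tags` checks in ci.py work.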
class StatusNames(metaclass=WithIter):
    """
    Class with statuses that aren't related to particular jobs
    """

    # overall CI report
    CI = "CI running"
    # mergeable status
    MERGEABLE = "Mergeable Check"
    # status of a sync pr
    SYNC = "A Sync"
    # PR formatting check status
    PR_CHECK = "PR Check"


class SyncState(metaclass=WithIter):
    PENDING = "awaiting merge"
    MERGE_FAILED = "merge failed"
    TESTING = "awaiting test results"
    TESTS_FAILED = "tests failed"
    COMPLETED = "completed"


@dataclass
class DigestConfig:
    # all files, dirs to include into digest, glob supported
    include_paths: List[Union[str, Path]] = field(default_factory=list)
    # file suffixes to exclude from digest
    exclude_files: List[str] = field(default_factory=list)
    # directories to exclude from digest
    exclude_dirs: List[Union[str, Path]] = field(default_factory=list)
    # docker names to include into digest
    docker: List[str] = field(default_factory=list)
    # git submodules digest
    git_submodules: bool = False


@dataclass
class LabelConfig:
    """
    configures different CI scenarios per CI Tag/GH label
    """

    run_jobs: Iterable[str] = frozenset()


@dataclass
class BuildConfig:
    name: str
    compiler: str
    package_type: Literal["deb", "binary", "fuzzers"]
    additional_pkgs: bool = False
    debug_build: bool = False
    coverage: bool = False
    sanitizer: str = ""
    tidy: bool = False
    # sparse_checkout is needed only to test the option itself.
    # No particular sense to use it in every build, since it slows down the job.
    sparse_checkout: bool = False
    comment: str = ""
    static_binary_name: str = ""

    def export_env(self, export: bool = False) -> str:
        def process(field_name: str, field: Union[bool, str]) -> str:
            if isinstance(field, bool):
                field = str(field).lower()
            elif not isinstance(field, str):
                field = ""
            if export:
                return f"export BUILD_{field_name.upper()}={repr(field)}"
            return f"BUILD_{field_name.upper()}={field}"

        return "\n".join(process(k, v) for k, v in self.__dict__.items())
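# Illustration of what export_env(export=True) produces for a config such as
# BuildConfig(name="package_release", compiler="clang-18", package_type="deb")
# (compiler value here is illustrative):
#
#     export BUILD_NAME='package_release'
#     export BUILD_COMPILER='clang-18'
#     export BUILD_PACKAGE_TYPE='deb'
#     export BUILD_ADDITIONAL_PKGS='false'
#     ...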
@dataclass
class JobConfig:
    """
    contains config parameters for job execution in CI workflow
    """

    # GH Runner type (tag from @Runners)
    runner_type: str
    # used for config validation in ci unittests
    job_name_keyword: str = ""
    # builds required for the job (applicable for test jobs)
    required_builds: Optional[List[str]] = None
    # build config for the build job (applicable for builds)
    build_config: Optional[BuildConfig] = None
    # configures digest calculation for the job
    digest: DigestConfig = field(default_factory=DigestConfig)
    # will be triggered for the job if omitted in CI workflow yml
    run_command: str = ""
    # job timeout, seconds
    timeout: Optional[int] = None
    # sets number of batches for a multi-batch job
    num_batches: int = 1
    # label that enables job in CI, if set digest isn't used
    run_by_label: str = ""
    # to run always regardless of the job digest or/and label
    run_always: bool = False
    # if the job needs to be run on the release branch, including master (building packages, docker server).
    # NOTE: Subsequent runs on the same branch with the similar digest are still considered skip-able.
    required_on_release_branch: bool = False
    # job is for pr workflow only
    pr_only: bool = False
    # job is for release/master branches only
    release_only: bool = False
    # to randomly pick and run one job among jobs in the same @random_bucket (PR branches only).
    random_bucket: str = ""
    # Do not set it. A list of batches to run. It will be set in runtime in accordance with ci cache and ci settings
    batches: Optional[List[int]] = None
    # Do not set it. A list of batches to await. It will be set in runtime in accordance with ci cache and ci settings
    pending_batches: Optional[List[int]] = None

    def with_properties(self, **kwargs: Any) -> "JobConfig":
        res = copy.deepcopy(self)
        for k, v in kwargs.items():
            assert hasattr(self, k), f"Setting invalid attribute [{k}]"
            setattr(res, k, v)
        return res

    def get_required_build(self) -> str:
        assert self.required_builds
        return self.required_builds[0]
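# Usage sketch: with_properties() deep-copies a template config and overrides
# selected fields, e.g. with the templates from CommonJobConfigs just below
# (values here are illustrative):
#
#     stateless_asan = CommonJobConfigs.STATELESS_TEST.with_properties(
#         required_builds=[BuildNames.PACKAGE_ASAN], num_batches=4
#     )
#     stateless_asan.get_required_build()  # -> "package_asan"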
class CommonJobConfigs:
|
||||
"""
|
||||
Common job configs
|
||||
"""
|
||||
|
||||
BUILD_REPORT = JobConfig(
|
||||
job_name_keyword="build_check",
|
||||
run_command="build_report_check.py",
|
||||
digest=DigestConfig(
|
||||
include_paths=[
|
||||
"./tests/ci/build_report_check.py",
|
||||
"./tests/ci/upload_result_helper.py",
|
||||
],
|
||||
),
|
||||
runner_type=Runners.STYLE_CHECKER_ARM,
|
||||
)
|
||||
COMPATIBILITY_TEST = JobConfig(
|
||||
job_name_keyword="compatibility",
|
||||
digest=DigestConfig(
|
||||
include_paths=["./tests/ci/compatibility_check.py"],
|
||||
docker=["clickhouse/test-old-ubuntu", "clickhouse/test-old-centos"],
|
||||
),
|
||||
run_command="compatibility_check.py",
|
||||
runner_type=Runners.STYLE_CHECKER,
|
||||
)
|
||||
INSTALL_TEST = JobConfig(
|
||||
job_name_keyword="install",
|
||||
digest=DigestConfig(
|
||||
include_paths=["./tests/ci/install_check.py"],
|
||||
docker=["clickhouse/install-deb-test", "clickhouse/install-rpm-test"],
|
||||
),
|
||||
run_command='install_check.py "$CHECK_NAME"',
|
||||
runner_type=Runners.STYLE_CHECKER,
|
||||
timeout=900,
|
||||
)
|
||||
STATELESS_TEST = JobConfig(
|
||||
job_name_keyword="stateless",
|
||||
digest=DigestConfig(
|
||||
include_paths=[
|
||||
"./tests/ci/functional_test_check.py",
|
||||
"./tests/queries/0_stateless/",
|
||||
"./tests/clickhouse-test",
|
||||
"./tests/config",
|
||||
"./tests/*.txt",
|
||||
],
|
||||
exclude_files=[".md"],
|
||||
docker=["clickhouse/stateless-test"],
|
||||
),
|
||||
run_command='functional_test_check.py "$CHECK_NAME"',
|
||||
runner_type=Runners.FUNC_TESTER,
|
||||
timeout=10800,
|
||||
)
|
||||
STATEFUL_TEST = JobConfig(
|
||||
job_name_keyword="stateful",
|
||||
digest=DigestConfig(
|
||||
include_paths=[
|
||||
"./tests/ci/functional_test_check.py",
|
||||
"./tests/queries/1_stateful/",
|
||||
"./tests/clickhouse-test",
|
||||
"./tests/config",
|
||||
"./tests/*.txt",
|
||||
],
|
||||
exclude_files=[".md"],
|
||||
docker=["clickhouse/stateful-test"],
|
||||
),
|
||||
run_command='functional_test_check.py "$CHECK_NAME"',
|
||||
runner_type=Runners.FUNC_TESTER,
|
||||
timeout=3600,
|
||||
)
|
||||
STRESS_TEST = JobConfig(
|
||||
job_name_keyword="stress",
|
||||
digest=DigestConfig(
|
||||
include_paths=[
|
||||
"./tests/queries/0_stateless/",
|
||||
"./tests/queries/1_stateful/",
|
||||
"./tests/clickhouse-test",
|
||||
"./tests/config",
|
||||
"./tests/*.txt",
|
||||
],
|
||||
exclude_files=[".md"],
|
||||
docker=["clickhouse/stress-test"],
|
||||
),
|
||||
run_command="stress_check.py",
|
||||
runner_type=Runners.STRESS_TESTER,
|
||||
timeout=9000,
|
||||
)
|
||||
UPGRADE_TEST = JobConfig(
|
||||
job_name_keyword="upgrade",
|
||||
digest=DigestConfig(
|
||||
include_paths=["./tests/ci/upgrade_check.py"],
|
||||
exclude_files=[".md"],
|
||||
docker=["clickhouse/upgrade-check"],
|
||||
),
|
||||
run_command="upgrade_check.py",
|
||||
runner_type=Runners.STRESS_TESTER,
|
||||
)
|
||||
INTEGRATION_TEST = JobConfig(
|
||||
job_name_keyword="integration",
|
||||
digest=DigestConfig(
|
||||
include_paths=[
|
||||
"./tests/ci/integration_test_check.py",
|
||||
"./tests/ci/integration_tests_runner.py",
|
||||
"./tests/integration/",
|
||||
],
|
||||
exclude_files=[".md"],
|
||||
docker=IMAGES.copy(),
|
||||
),
|
||||
run_command='integration_test_check.py "$CHECK_NAME"',
|
||||
runner_type=Runners.STRESS_TESTER,
|
||||
)
|
||||
ASTFUZZER_TEST = JobConfig(
|
||||
job_name_keyword="ast",
|
||||
digest=DigestConfig(),
|
||||
run_command="ast_fuzzer_check.py",
|
||||
run_always=True,
|
    runner_type=Runners.FUZZER_UNIT_TESTER,
)
UNIT_TEST = JobConfig(
    job_name_keyword="unit",
    digest=DigestConfig(
        include_paths=["./tests/ci/unit_tests_check.py"],
        exclude_files=[".md"],
        docker=["clickhouse/unit-test"],
    ),
    run_command="unit_tests_check.py",
    runner_type=Runners.FUZZER_UNIT_TESTER,
)
PERF_TESTS = JobConfig(
    job_name_keyword="performance",
    digest=DigestConfig(
        include_paths=[
            "./tests/ci/performance_comparison_check.py",
            "./tests/performance/",
        ],
        exclude_files=[".md"],
        docker=["clickhouse/performance-comparison"],
    ),
    run_command="performance_comparison_check.py",
    runner_type=Runners.STRESS_TESTER,
)
SQLLANCER_TEST = JobConfig(
    job_name_keyword="lancer",
    digest=DigestConfig(),
    run_command="sqlancer_check.py",
    release_only=True,
    run_always=True,
    runner_type=Runners.FUZZER_UNIT_TESTER,
)
SQLLOGIC_TEST = JobConfig(
    job_name_keyword="logic",
    digest=DigestConfig(
        include_paths=["./tests/ci/sqllogic_test.py"],
        exclude_files=[".md"],
        docker=["clickhouse/sqllogic-test"],
    ),
    run_command="sqllogic_test.py",
    timeout=10800,
    release_only=True,
    runner_type=Runners.STYLE_CHECKER,
)
SQL_TEST = JobConfig(
    job_name_keyword="sqltest",
    digest=DigestConfig(
        include_paths=["./tests/ci/sqltest.py"],
        exclude_files=[".md"],
        docker=["clickhouse/sqltest"],
    ),
    run_command="sqltest.py",
    timeout=10800,
    release_only=True,
    runner_type=Runners.FUZZER_UNIT_TESTER,
)
BUGFIX_TEST = JobConfig(
    job_name_keyword="bugfix",
    digest=DigestConfig(),
    run_command="bugfix_validate_check.py",
    timeout=900,
    runner_type=Runners.FUNC_TESTER,
)
DOCKER_SERVER = JobConfig(
    job_name_keyword="docker",
    required_on_release_branch=True,
    run_command='docker_server.py --check-name "$CHECK_NAME" --release-type head --allow-build-reuse',
    digest=DigestConfig(
        include_paths=[
            "tests/ci/docker_server.py",
            "./docker/server",
        ]
    ),
    runner_type=Runners.STYLE_CHECKER,
)
CLICKBENCH_TEST = JobConfig(
    job_name_keyword="clickbench",
    digest=DigestConfig(
        include_paths=[
            "tests/ci/clickbench.py",
        ],
        docker=["clickhouse/clickbench"],
    ),
    run_command='clickbench.py "$CHECK_NAME"',
    timeout=900,
    runner_type=Runners.FUNC_TESTER,
)
BUILD = JobConfig(
    required_on_release_branch=True,
    digest=DigestConfig(
        include_paths=[
            "./src",
            "./contrib/*-cmake",
            "./contrib/consistent-hashing",
            "./contrib/murmurhash",
            "./contrib/libfarmhash",
            "./contrib/pdqsort",
            "./contrib/cityhash102",
            "./contrib/sparse-checkout",
            "./contrib/libmetrohash",
            "./contrib/update-submodules.sh",
            "./contrib/CMakeLists.txt",
            "./CMakeLists.txt",
            "./PreLoad.cmake",
            "./cmake",
            "./base",
            "./programs",
            "./packages",
            "./docker/packager/packager",
            "./rust",
            "./tests/ci/version_helper.py",
            # FIXME: This is a WA to rebuild the CH and recreate the Performance.tar.zst artifact
            # when there are changes in performance test scripts.
            # Due to the current design of the perf test we need to rebuild CH when the performance test changes,
            # otherwise the changes will not be visible in the PerformanceTest job in CI
            "./tests/performance",
        ],
        exclude_files=[".md"],
        docker=["clickhouse/binary-builder"],
        git_submodules=True,
    ),
    run_command="build_check.py $BUILD_NAME",
    runner_type=Runners.BUILDER,
)
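For readers unfamiliar with these shared templates: each block above is a reusable JobConfig whose job_name_keyword ties it to the jobs that use it. A minimal, self-contained sketch of how such a template can be specialized per job — all names below are illustrative stand-ins, not this diff's API:

# Illustrative sketch only: deriving a concrete job config from a shared
# template via dataclasses.replace. The template/variant names are assumptions
# for illustration, not names confirmed by this diff.
from dataclasses import dataclass, field, replace
from typing import List

@dataclass
class DigestConfig:
    include_paths: List[str] = field(default_factory=list)
    exclude_files: List[str] = field(default_factory=list)
    docker: List[str] = field(default_factory=list)

@dataclass
class JobConfig:
    job_name_keyword: str = ""
    digest: DigestConfig = field(default_factory=DigestConfig)
    run_command: str = ""
    runner_type: str = "func-tester"
    timeout: int = 7200

UNIT_TEST_TEMPLATE = JobConfig(
    job_name_keyword="unit",
    run_command="unit_tests_check.py",
)

# A per-job variant only overrides what differs from the template.
unit_test_variant = replace(UNIT_TEST_TEMPLATE, timeout=10800)
print(unit_test_variant.job_name_keyword, unit_test_variant.timeout)  # unit 10800
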
REQUIRED_CHECKS = [
    StatusNames.PR_CHECK,
    StatusNames.SYNC,
    JobNames.BUILD_CHECK,
    JobNames.DOCS_CHECK,
    JobNames.FAST_TEST,
    JobNames.STATEFUL_TEST_RELEASE,
    JobNames.STATELESS_TEST_RELEASE,
    JobNames.STATELESS_TEST_ASAN,
    JobNames.STATELESS_TEST_FLAKY_ASAN,
    JobNames.STATEFUL_TEST_ASAN,
    JobNames.STYLE_CHECK,
    JobNames.UNIT_TEST_ASAN,
    JobNames.UNIT_TEST_MSAN,
    JobNames.UNIT_TEST,
    JobNames.UNIT_TEST_TSAN,
    JobNames.UNIT_TEST_UBSAN,
    JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER,
    JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE,
]

# Jobs that run in Merge Queue if it's enabled
MQ_JOBS = [
    JobNames.STYLE_CHECK,
    JobNames.FAST_TEST,
    BuildNames.BINARY_RELEASE,
    JobNames.UNIT_TEST,
]
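REQUIRED_CHECKS feeds the CI.is_required(...) calls in the commit_status_helper hunks further down. A minimal sketch of such a membership test, assuming (not confirmed by this diff) that batched check names only extend the base name:

# Sketch of an is_required-style helper; the prefix handling is an assumption
# for illustration, not necessarily how the real CI.is_required is written.
REQUIRED_CHECKS_SAMPLE = ["Style check", "Fast test", "Stateless tests (release)"]

def is_required(check_name: str) -> bool:
    # exact match, or a batched variant like "Stateless tests (release) [1/2]"
    return any(
        check_name == required or check_name.startswith(required)
        for required in REQUIRED_CHECKS_SAMPLE
    )

assert is_required("Stateless tests (release) [1/2]")
assert not is_required("ClickBench (release)")
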
@dataclass
class CheckDescription:
    name: str
    description: str  # the check description; it is put into the status table
    match_func: Callable[[str], bool]  # the function to match against the commit status context

    def __hash__(self) -> int:
        return hash(self.name + self.description)
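CheckDescription couples a human-readable description with a match_func predicate; the generate_status_comment hunk below picks the first entry whose predicate accepts a status context and falls back to the last one. A self-contained sketch of that lookup, with sample entries abbreviated from the list below:

# Sketch of the description lookup used by the status comment generator;
# the helper name `describe` is illustrative.
from dataclasses import dataclass
from typing import Callable

@dataclass(frozen=True)
class CheckDescriptionSketch:
    name: str
    description: str
    match_func: Callable[[str], bool]

DESCRIPTIONS = [
    CheckDescriptionSketch("AST fuzzer", "Runs randomly generated queries", lambda x: x.startswith("AST fuzzer")),
    CheckDescriptionSketch("Fallback for unknown", "No description yet", lambda x: True),
]

def describe(context: str) -> CheckDescriptionSketch:
    # the last entry matches everything, so next() always succeeds
    return next(cd for cd in DESCRIPTIONS if cd.match_func(context))

assert describe("AST fuzzer (asan)").name == "AST fuzzer"
assert describe("Brand new check").name == "Fallback for unknown"
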
CHECK_DESCRIPTIONS = [
    CheckDescription(
        StatusNames.PR_CHECK,
        "Checks correctness of the PR's body",
        lambda x: x == "PR Check",
    ),
    CheckDescription(
        StatusNames.SYNC,
        "If it fails, ask a maintainer for help",
        lambda x: x == StatusNames.SYNC,
    ),
    CheckDescription(
        "AST fuzzer",
        "Runs randomly generated queries to catch program errors. "
        "The build type is optionally given in parentheses. "
        "If it fails, ask a maintainer for help",
        lambda x: x.startswith("AST fuzzer"),
    ),
    CheckDescription(
        JobNames.BUGFIX_VALIDATE,
        "Checks that either a new test (functional or integration) is added, or "
        "there are changed tests that fail with the binary built on the master branch",
        lambda x: x == JobNames.BUGFIX_VALIDATE,
    ),
    CheckDescription(
        StatusNames.CI,
        "A meta-check that indicates the running CI. Normally, it's in <b>success</b> or "
        "<b>pending</b> state. The failed status indicates some problems with the PR",
        lambda x: x == "CI running",
    ),
    CheckDescription(
        "ClickHouse build check",
        "Builds ClickHouse in various configurations for use in further steps. "
        "You have to fix the builds that fail. Build logs often have enough "
        "information to fix the error, but you might have to reproduce the failure "
        "locally. The <b>cmake</b> options can be found in the build log, grepping for "
        '<b>cmake</b>. Use these options and follow the <a href="'
        'https://clickhouse.com/docs/en/development/build">general build process</a>',
        lambda x: x.startswith("ClickHouse") and x.endswith("build check"),
    ),
    CheckDescription(
        "Compatibility check",
        "Checks that <b>clickhouse</b> binary runs on distributions with old libc "
        "versions. If it fails, ask a maintainer for help",
        lambda x: x.startswith("Compatibility check"),
    ),
    CheckDescription(
        JobNames.DOCKER_SERVER,
        "The check to build and optionally push the mentioned image to docker hub",
        lambda x: x.startswith("Docker server"),
    ),
    CheckDescription(
        JobNames.DOCKER_KEEPER,
        "The check to build and optionally push the mentioned image to docker hub",
        lambda x: x.startswith("Docker keeper"),
    ),
    CheckDescription(
        JobNames.DOCS_CHECK,
        "Builds and tests the documentation",
        lambda x: x == JobNames.DOCS_CHECK,
    ),
    CheckDescription(
        JobNames.FAST_TEST,
        "Normally this is the first check that is run for a PR. It builds ClickHouse "
        'and runs most of <a href="https://clickhouse.com/docs/en/development/tests'
        '#functional-tests">stateless functional tests</a>, '
        "omitting some. If it fails, further checks are not started until it is fixed. "
        "Look at the report to see which tests fail, then reproduce the failure "
        'locally as described <a href="https://clickhouse.com/docs/en/development/'
        'tests#functional-test-locally">here</a>',
        lambda x: x == JobNames.FAST_TEST,
    ),
    CheckDescription(
        "Flaky tests",
        "Checks if newly added or modified tests are flaky by running them repeatedly, "
        "in parallel, with more randomization. Functional tests are run 100 times "
        "with address sanitizer, and additional randomization of thread scheduling. "
        "Integration tests are run up to 10 times. If a new test has failed at least "
        "once, or ran for too long, this check will be red. We don't allow flaky tests; "
        'read <a href="https://clickhouse.com/blog/decorating-a-christmas-tree-with-'
        'the-help-of-flaky-tests/">the doc</a>',
        lambda x: "tests flaky check" in x,
    ),
    CheckDescription(
        "Install packages",
        "Checks that the built packages are installable in a clean environment",
        lambda x: x.startswith("Install packages ("),
    ),
    CheckDescription(
        "Integration tests",
        "The integration tests report. The package type is given in parentheses, "
        "and the optional part/total tests in square brackets",
        lambda x: x.startswith("Integration tests ("),
    ),
    CheckDescription(
        StatusNames.MERGEABLE,
        "Checks if all other necessary checks are successful",
        lambda x: x == StatusNames.MERGEABLE,
    ),
    CheckDescription(
        "Performance Comparison",
        "Measures changes in query performance. The performance test report is "
        'described in detail <a href="https://github.com/ClickHouse/ClickHouse/tree'
        '/master/docker/test/performance-comparison#how-to-read-the-report">here</a>. '
        "The optional part/total tests are given in square brackets",
        lambda x: x.startswith("Performance Comparison"),
    ),
    CheckDescription(
        "Push to Dockerhub",
        "The check for building and pushing the CI related docker images to docker hub",
        lambda x: x.startswith("Push") and "to Dockerhub" in x,
    ),
    CheckDescription(
        "Sqllogic",
        "Runs clickhouse on the "
        '<a href="https://www.sqlite.org/sqllogictest">sqllogic</a> '
        "test set against sqlite and checks that all statements pass",
        lambda x: x.startswith("Sqllogic test"),
    ),
    CheckDescription(
        "SQLancer",
        "Fuzzing tests that detect logical bugs with the "
        '<a href="https://github.com/sqlancer/sqlancer">SQLancer</a> tool',
        lambda x: x.startswith("SQLancer"),
    ),
    CheckDescription(
        "Stateful tests",
        "Runs stateful functional tests for ClickHouse binaries built in various "
        "configurations -- release, debug, with sanitizers, etc.",
        lambda x: x.startswith("Stateful tests ("),
    ),
    CheckDescription(
        "Stateless tests",
        "Runs stateless functional tests for ClickHouse binaries built in various "
        "configurations -- release, debug, with sanitizers, etc.",
        lambda x: x.startswith("Stateless tests ("),
    ),
    CheckDescription(
        "Stress test",
        "Runs stateless functional tests concurrently from several clients to detect "
        "concurrency-related errors",
        lambda x: x.startswith("Stress test ("),
    ),
    CheckDescription(
        JobNames.STYLE_CHECK,
        "Runs a set of checks to keep the code style clean. If some of the tests fail, "
        "see the related log in the report",
        lambda x: x == JobNames.STYLE_CHECK,
    ),
    CheckDescription(
        "Unit tests",
        "Runs the unit tests for different release types",
        lambda x: x.startswith("Unit tests ("),
    ),
    CheckDescription(
        "Upgrade check",
        "Runs stress tests on the server version from the last release and then tries to "
        "upgrade it to the version from the PR. It checks if the new server can "
        "successfully start up without any errors, crashes or sanitizer asserts",
        lambda x: x.startswith("Upgrade check ("),
    ),
    CheckDescription(
        "ClickBench",
        "Runs [ClickBench](https://github.com/ClickHouse/ClickBench/) with instant-attach table",
        lambda x: x.startswith("ClickBench"),
    ),
    CheckDescription(
        "Fallback for unknown",
        "There's no description for the check yet, please add it to "
        "tests/ci/ci_config.py:CHECK_DESCRIPTIONS",
        lambda x: True,
    ),
]
@ -3,7 +3,7 @@ from dataclasses import dataclass, asdict
from typing import Optional, List, Dict, Any, Iterable

from ci_utils import normalize_string
from ci_config import CILabels, CI_CONFIG, JobConfig, JobNames
from ci_config import CI
from git_helper import Runner as GitRunner, GIT_PREFIX
from pr_info import PRInfo

@ -80,7 +80,7 @@ class CiSettings:
            if not res.ci_jobs:
                res.ci_jobs = []
            res.ci_jobs.append(match.removeprefix("job_"))
        elif match.startswith("ci_set_") and match in CILabels:
        elif match.startswith("ci_set_") and match in CI.Tags:
            if not res.ci_sets:
                res.ci_sets = []
            res.ci_sets.append(match)

@ -97,15 +97,15 @@ class CiSettings:
                res.exclude_keywords += [
                    normalize_string(keyword) for keyword in keywords
                ]
            elif match == CILabels.NO_CI_CACHE:
            elif match == CI.Tags.NO_CI_CACHE:
                res.no_ci_cache = True
                print("NOTE: CI Cache will be disabled")
            elif match == CILabels.UPLOAD_ALL_ARTIFACTS:
            elif match == CI.Tags.UPLOAD_ALL_ARTIFACTS:
                res.upload_all = True
                print("NOTE: All binary artifacts will be uploaded")
            elif match == CILabels.DO_NOT_TEST_LABEL:
            elif match == CI.Tags.DO_NOT_TEST_LABEL:
                res.do_not_test = True
            elif match == CILabels.NO_MERGE_COMMIT:
            elif match == CI.Tags.NO_MERGE_COMMIT:
                res.no_merge_commit = True
                print("NOTE: Merge Commit will be disabled")
            elif match.startswith("batch_"):

@ -131,18 +131,18 @@ class CiSettings:
    def _check_if_selected(
        self,
        job: str,
        job_config: JobConfig,
        job_config: CI.JobConfig,
        is_release: bool,
        is_pr: bool,
        is_mq: bool,
        labels: Iterable[str],
    ) -> bool:  # type: ignore  # too-many-return-statements
        if self.do_not_test:
            label_config = CI_CONFIG.get_label_config(CILabels.DO_NOT_TEST_LABEL)
            assert label_config, f"Unknown tag [{CILabels.DO_NOT_TEST_LABEL}]"
            label_config = CI.get_tag_config(CI.Tags.DO_NOT_TEST_LABEL)
            assert label_config, f"Unknown tag [{CI.Tags.DO_NOT_TEST_LABEL}]"
            if job in label_config.run_jobs:
                print(
                    f"Job [{job}] present in CI set [{CILabels.DO_NOT_TEST_LABEL}] - pass"
                    f"Job [{job}] present in CI set [{CI.Tags.DO_NOT_TEST_LABEL}] - pass"
                )
                return True
            return False

@ -164,7 +164,7 @@ class CiSettings:

        to_deny = False
        if self.include_keywords:
            if job == JobNames.STYLE_CHECK:
            if job == CI.JobNames.STYLE_CHECK:
                # never exclude Style Check by include keywords
                return True
            for keyword in self.include_keywords:

@ -175,7 +175,7 @@ class CiSettings:

        if self.ci_sets:
            for tag in self.ci_sets:
                label_config = CI_CONFIG.get_label_config(tag)
                label_config = CI.get_tag_config(tag)
                assert label_config, f"Unknown tag [{tag}]"
                if job in label_config.run_jobs:
                    print(f"Job [{job}] present in CI set [{tag}] - pass")

@ -197,12 +197,12 @@ class CiSettings:

    def apply(
        self,
        job_configs: Dict[str, JobConfig],
        job_configs: Dict[str, CI.JobConfig],
        is_release: bool,
        is_pr: bool,
        is_mq: bool,
        labels: Iterable[str],
    ) -> Dict[str, JobConfig]:
    ) -> Dict[str, CI.JobConfig]:
        """
        Apply CI settings from the PR body
        """

@ -220,7 +220,7 @@ class CiSettings:

        add_parents = []
        for job in list(res):
            parent_jobs = CI_CONFIG.get_job_parents(job)
            parent_jobs = CI.get_job_parents(job)
            for parent_job in parent_jobs:
                if parent_job not in res:
                    add_parents.append(parent_job)
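The dispatch above maps extracted tag strings onto CiSettings fields. A condensed, runnable sketch of the same branching, operating on pre-extracted tags — the Tags literals below are placeholders, not the real values:

# Condensed sketch of the tag dispatch shown above; literals are placeholders.
class TagsSketch:
    NO_CI_CACHE = "ci_flags_no_ci_cache"  # placeholder literal
    KNOWN_SETS = {"ci_set_builds", "ci_set_non_required"}  # placeholder literals

def apply_matches(matches):
    res = {"ci_jobs": [], "ci_sets": [], "no_ci_cache": False}
    for match in matches:
        if match.startswith("job_"):
            res["ci_jobs"].append(match.removeprefix("job_"))
        elif match.startswith("ci_set_") and match in TagsSketch.KNOWN_SETS:
            res["ci_sets"].append(match)
        elif match == TagsSketch.NO_CI_CACHE:
            res["no_ci_cache"] = True
    return res

print(apply_matches(["job_Style check", "ci_set_builds"]))
# {'ci_jobs': ['Style check'], 'ci_sets': ['ci_set_builds'], 'no_ci_cache': False}
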
@ -17,7 +17,7 @@ from github.GithubObject import NotSet
from github.IssueComment import IssueComment
from github.Repository import Repository

from ci_config import CHECK_DESCRIPTIONS, CheckDescription, StatusNames, CIConfig
from ci_config import CI
from env_helper import GITHUB_REPOSITORY, GITHUB_UPSTREAM_REPOSITORY, TEMP_PATH
from lambda_shared_package.lambda_shared.pr import Labels
from pr_info import PRInfo

@ -160,7 +160,7 @@ def set_status_comment(commit: Commit, pr_info: PRInfo) -> None:
    if not statuses:
        return

    if not [status for status in statuses if status.context == StatusNames.CI]:
    if not [status for status in statuses if status.context == CI.StatusNames.CI]:
        # This is the case when some statuses already exist for the check,
        # but not StatusNames.CI. We should create it as pending.
        # W/o pr_info to avoid recursion, and yes, one extra create_ci_report

@ -169,7 +169,7 @@ def set_status_comment(commit: Commit, pr_info: PRInfo) -> None:
        PENDING,
        create_ci_report(pr_info, statuses),
        "The report for running CI",
        StatusNames.CI,
        CI.StatusNames.CI,
    )

    # We update the report in the generate_status_comment function, so do it each

@ -212,20 +212,20 @@ def generate_status_comment(pr_info: PRInfo, statuses: CommitStatuses) -> str:
        f"\n"
    )
    # group checks by name to get the worst status for each
    grouped_statuses = {}  # type: Dict[CheckDescription, CommitStatuses]
    grouped_statuses = {}  # type: Dict[CI.CheckDescription, CommitStatuses]
    for status in statuses:
        cd = None
        for c in CHECK_DESCRIPTIONS:
        for c in CI.CHECK_DESCRIPTIONS:
            if c.match_func(status.context):
                cd = c
                break

        if cd is None or cd == CHECK_DESCRIPTIONS[-1]:
        if cd is None or cd == CI.CHECK_DESCRIPTIONS[-1]:
            # This is the case of either a not-found description or the fallback
            cd = CheckDescription(
            cd = CI.CheckDescription(
                status.context,
                CHECK_DESCRIPTIONS[-1].description,
                CHECK_DESCRIPTIONS[-1].match_func,
                CI.CHECK_DESCRIPTIONS[-1].description,
                CI.CHECK_DESCRIPTIONS[-1].match_func,
            )

        if cd in grouped_statuses:

@ -301,7 +301,7 @@ def create_ci_report(pr_info: PRInfo, statuses: CommitStatuses) -> str:
        )
    )
    return upload_results(
        S3Helper(), pr_info.number, pr_info.sha, test_results, [], StatusNames.CI
        S3Helper(), pr_info.number, pr_info.sha, test_results, [], CI.StatusNames.CI
    )


@ -435,7 +435,7 @@ def set_mergeable_check(
        state,
        report_url,
        format_description(description),
        StatusNames.MERGEABLE,
        CI.StatusNames.MERGEABLE,
    )


@ -443,7 +443,7 @@ def update_mergeable_check(commit: Commit, pr_info: PRInfo, check_name: str) ->
    "check if the check_name is in REQUIRED_CHECKS and then trigger the update"
    not_run = (
        pr_info.labels.intersection({Labels.SKIP_MERGEABLE_CHECK, Labels.RELEASE})
        or not CIConfig.is_required(check_name)
        or not CI.is_required(check_name)
        or pr_info.release_pr
        or pr_info.number == 0
    )

@ -465,13 +465,11 @@ def trigger_mergeable_check(
    workflow_failed: bool = False,
) -> StatusType:
    """calculate and update StatusNames.MERGEABLE"""
    required_checks = [
        status for status in statuses if CIConfig.is_required(status.context)
    ]
    required_checks = [status for status in statuses if CI.is_required(status.context)]

    mergeable_status = None
    for status in statuses:
        if status.context == StatusNames.MERGEABLE:
        if status.context == CI.StatusNames.MERGEABLE:
            mergeable_status = status
            break

@ -548,7 +546,7 @@ def update_upstream_sync_status(
        "Using commit %s to post the %s status `%s`: [%s]",
        last_synced_upstream_commit.sha,
        sync_status,
        StatusNames.SYNC,
        CI.StatusNames.SYNC,
        "",
    )
    post_commit_status(

@ -556,7 +554,7 @@ def update_upstream_sync_status(
        sync_status,
        "",
        "",
        StatusNames.SYNC,
        CI.StatusNames.SYNC,
    )
    trigger_mergeable_check(
        last_synced_upstream_commit,
@ -9,10 +9,10 @@ from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Union
from sys import modules

from docker_images_helper import get_images_info
from ci_config import DigestConfig
from git_helper import Runner
from env_helper import ROOT_DIR
from ci_utils import cd
from ci_config import CI

DOCKER_DIGEST_LEN = 12
JOB_DIGEST_LEN = 10

@ -139,20 +139,21 @@ class DockerDigester:


class JobDigester:
    def __init__(self):
    def __init__(self, dry_run: bool = False):
        self.dd = DockerDigester()
        self.cache: Dict[str, str] = {}
        self.dry_run = dry_run

    @staticmethod
    def _get_config_hash(digest_config: DigestConfig) -> str:
    def _get_config_hash(digest_config: CI.DigestConfig) -> str:
        data_dict = asdict(digest_config)
        hash_obj = md5()
        hash_obj.update(str(data_dict).encode())
        hash_string = hash_obj.hexdigest()
        return hash_string

    def get_job_digest(self, digest_config: DigestConfig) -> str:
        if not digest_config.include_paths:
    def get_job_digest(self, digest_config: CI.DigestConfig) -> str:
        if not digest_config.include_paths or self.dry_run:
            # job is not for digest
            return "f" * JOB_DIGEST_LEN
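JobDigester turns a job's DigestConfig into a short hash used as a CI-cache key; with dry_run (or no include_paths) it short-circuits to a constant. A runnable sketch of just the config-hash path shown above — the real get_job_digest also folds in file and docker digests, omitted here:

# Runnable sketch of the config-hash step from the hunk above only.
from dataclasses import dataclass, field, asdict
from hashlib import md5
from typing import List

JOB_DIGEST_LEN_SKETCH = 10

@dataclass
class DigestConfigSketch:
    include_paths: List[str] = field(default_factory=list)

def get_job_digest(config: DigestConfigSketch, dry_run: bool = False) -> str:
    if not config.include_paths or dry_run:
        # job is not for digest
        return "f" * JOB_DIGEST_LEN_SKETCH
    return md5(str(asdict(config)).encode()).hexdigest()[:JOB_DIGEST_LEN_SKETCH]

print(get_job_digest(DigestConfigSketch()))                        # 'ffffffffff'
print(get_job_digest(DigestConfigSketch(["./src"]), dry_run=True))  # 'ffffffffff'
print(get_job_digest(DigestConfigSketch(["./src"])))                # stable 10-char hash
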
@ -8,7 +8,7 @@ import logging
from pathlib import Path

from build_download_helper import download_build_with_progress
from ci_config import CI_CONFIG
from ci_config import CI
from env_helper import RUNNER_TEMP, S3_ARTIFACT_DOWNLOAD_TEMPLATE
from git_helper import Git, commit
from version_helper import get_version_from_repo, version_arg

@ -59,7 +59,8 @@ def main():
    temp_path.mkdir(parents=True, exist_ok=True)
    for build in args.build_names:
        # check if it's in CI_CONFIG
        config = CI_CONFIG.build_config[build]
        config = CI.JOB_CONFIGS[build].build_config
        assert config
        if args.rename and config.static_binary_name:
            path = temp_path / f"clickhouse-{config.static_binary_name}"
        else:
@ -9,8 +9,9 @@ from build_download_helper import APIException, get_gh_api

module_dir = p.abspath(p.dirname(__file__))
git_root = p.abspath(p.join(module_dir, "..", ".."))

ROOT_DIR = git_root
CI = bool(os.getenv("CI"))
IS_CI = bool(os.getenv("CI"))
TEMP_PATH = os.getenv("TEMP_PATH", p.abspath(p.join(module_dir, "./tmp")))
REPORT_PATH = f"{TEMP_PATH}/reports"
# FIXME: latest should not be used in CI, set temporary for transition to "docker with digest as a tag"
@ -4,7 +4,7 @@ import logging

from github import Github

from ci_config import StatusNames
from ci_config import CI
from commit_status_helper import (
    get_commit,
    get_commit_filtered_statuses,

@ -71,7 +71,7 @@ def main():
        can_set_green_mergeable_status=True,
    )

    ci_running_statuses = [s for s in statuses if s.context == StatusNames.CI]
    ci_running_statuses = [s for s in statuses if s.context == CI.StatusNames.CI]
    if not ci_running_statuses:
        return
    # Take the latest status

@ -81,7 +81,11 @@ def main():
    has_pending = False
    error_cnt = 0
    for status in statuses:
        if status.context in (StatusNames.MERGEABLE, StatusNames.CI, StatusNames.SYNC):
        if status.context in (
            CI.StatusNames.MERGEABLE,
            CI.StatusNames.CI,
            CI.StatusNames.SYNC,
        ):
            # do not account for these statuses
            continue
        if status.state == PENDING:

@ -108,7 +112,7 @@ def main():
        ci_state,
        ci_status.target_url,
        description,
        StatusNames.CI,
        CI.StatusNames.CI,
        pr_info,
        dump_to_file=True,
    )
@ -18,7 +18,7 @@ from collections import defaultdict
from itertools import chain
from typing import Any, Dict

from env_helper import CI
from env_helper import IS_CI
from integration_test_images import IMAGES

MAX_RETRY = 1

@ -1004,7 +1004,7 @@ def run():

    logging.info("Running tests")

    if CI:
    if IS_CI:
        # Avoid overlaps with previous runs
        logging.info("Clearing dmesg before run")
        subprocess.check_call("sudo -E dmesg --clear", shell=True)

@ -1012,7 +1012,7 @@ def run():
    state, description, test_results, _ = runner.run_impl(repo_path, build_path)
    logging.info("Tests finished")

    if CI:
    if IS_CI:
        # Dump dmesg (to capture possible OOMs)
        logging.info("Dumping dmesg")
        subprocess.check_call("sudo -E dmesg -T", shell=True)
@ -13,7 +13,6 @@ import requests

from build_download_helper import (
    download_build_with_progress,
    get_build_name_for_check,
    read_build_urls,
)
from compress_files import compress_fast

@ -25,6 +24,7 @@ from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults
from ssh import SSHKey
from stopwatch import Stopwatch
from tee_popen import TeePopen
from ci_config import CI

JEPSEN_GROUP_NAME = "jepsen_group"

@ -224,7 +224,7 @@ def main():
        head = requests.head(build_url, timeout=60)
        assert head.status_code == 200, f"Clickhouse binary not found: {build_url}"
    else:
        build_name = get_build_name_for_check(check_name)
        build_name = CI.get_required_build_name(check_name)
        urls = read_build_urls(build_name, REPORT_PATH)
        build_url = None
        for url in urls:
@ -12,7 +12,7 @@ from pathlib import Path
from github import Github

from build_download_helper import download_builds_filter
from ci_config import CI_CONFIG
from ci_config import CI
from clickhouse_helper import get_instance_id, get_instance_type
from commit_status_helper import get_commit
from docker_images_helper import get_docker_image, pull_image

@ -83,7 +83,7 @@ def main():
    assert (
        check_name
    ), "Check name must be provided as an input arg or in CHECK_NAME env"
    required_build = CI_CONFIG.test_configs[check_name].required_build
    required_build = CI.JOB_CONFIGS[check_name].get_required_build()

    with open(GITHUB_EVENT_PATH, "r", encoding="utf-8") as event_file:
        event = json.load(event_file)
@ -316,7 +316,9 @@ class PRInfo:

    @property
    def is_master(self) -> bool:
        return self.number == 0 and self.head_ref == "master"
        return (
            self.number == 0 and self.head_ref == "master" and not self.is_merge_queue
        )

    @property
    def is_release(self) -> bool:

@ -324,7 +326,10 @@ class PRInfo:

    @property
    def is_pr(self):
        return self.event_type == EventType.PULL_REQUEST
        if self.event_type == EventType.PULL_REQUEST:
            assert self.number
            return True
        return False

    @property
    def is_scheduled(self) -> bool:

@ -353,9 +358,6 @@ class PRInfo:
        if self.changed_files_requested:
            return

        if not getattr(self, "diff_urls", False):
            raise TypeError("The event does not have diff URLs")

        for diff_url in self.diff_urls:
            response = get_gh_api(
                diff_url,
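Context for the tightened properties: a merge-queue event also carries number == 0, so without the extra guard is_master could misclassify it — the assertions in the test_ci_config.py hunks further down rely on exactly this. A reduced sketch, with EventType values as illustrative stand-ins:

# Reduced sketch of the PRInfo event properties after this change.
from enum import Enum

class EventTypeSketch(Enum):
    PULL_REQUEST = 1
    PUSH = 2
    MERGE_QUEUE = 3

class PRInfoSketch:
    def __init__(self, event_type, number=0, head_ref="master"):
        self.event_type = event_type
        self.number = number
        self.head_ref = head_ref

    @property
    def is_merge_queue(self) -> bool:
        return self.event_type == EventTypeSketch.MERGE_QUEUE

    @property
    def is_master(self) -> bool:
        return self.number == 0 and self.head_ref == "master" and not self.is_merge_queue

mq = PRInfoSketch(EventTypeSketch.MERGE_QUEUE)
assert mq.number == 0 and not mq.is_master  # the new guard at work
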
@ -21,7 +21,7 @@ from typing import (
)

from build_download_helper import get_gh_api
from ci_config import CI_CONFIG, BuildConfig
from ci_config import CI
from ci_utils import normalize_string
from env_helper import REPORT_PATH, TEMP_PATH

@ -412,6 +412,7 @@ class BuildResult:
        ref_report = None
        master_report = None
        any_report = None
        Path(REPORT_PATH).mkdir(parents=True, exist_ok=True)
        for file in Path(REPORT_PATH).iterdir():
            if f"{build_name}.json" in file.name:
                any_report = file

@ -448,8 +449,10 @@ class BuildResult:
        return json.dumps(asdict(self), indent=2)

    @property
    def build_config(self) -> Optional[BuildConfig]:
        return CI_CONFIG.build_config.get(self.build_name, None)
    def build_config(self) -> Optional[CI.BuildConfig]:
        if self.build_name not in CI.JOB_CONFIGS:
            return None
        return CI.JOB_CONFIGS[self.build_name].build_config

    @property
    def comment(self) -> str:
@ -5,7 +5,6 @@ from typing import Tuple

from github import Github

from ci_config import StatusNames
from commit_status_helper import (
    create_ci_report,
    format_description,

@ -24,6 +23,7 @@ from lambda_shared_package.lambda_shared.pr import (
)
from pr_info import PRInfo
from report import FAILURE, PENDING, SUCCESS, StatusType
from ci_config import CI

TRUSTED_ORG_IDS = {
    54801242,  # clickhouse

@ -208,7 +208,7 @@ def main():
        PENDING,
        ci_report_url,
        description,
        StatusNames.CI,
        CI.StatusNames.CI,
        pr_info,
    )
@ -11,7 +11,7 @@ import boto3  # type: ignore
import botocore  # type: ignore
from compress_files import compress_file_fast
from env_helper import (
    CI,
    IS_CI,
    RUNNER_TEMP,
    S3_BUILDS_BUCKET,
    S3_DOWNLOAD,

@ -111,13 +111,13 @@ class S3Helper:
        self.client.delete_object(Bucket=bucket_name, Key=s3_path)

    def upload_test_report_to_s3(self, file_path: Path, s3_path: str) -> str:
        if CI:
        if IS_CI:
            return self._upload_file_to_s3(S3_TEST_REPORTS_BUCKET, file_path, s3_path)

        return S3Helper.copy_file_to_local(S3_TEST_REPORTS_BUCKET, file_path, s3_path)

    def upload_build_file_to_s3(self, file_path: Path, s3_path: str) -> str:
        if CI:
        if IS_CI:
            return self._upload_file_to_s3(S3_BUILDS_BUCKET, file_path, s3_path)

        return S3Helper.copy_file_to_local(S3_BUILDS_BUCKET, file_path, s3_path)

@ -255,7 +255,7 @@ class S3Helper:

        if full_fs_path.is_symlink():
            if upload_symlinks:
                if CI:
                if IS_CI:
                    return self._upload_file_to_s3(
                        bucket_name,
                        full_fs_path,

@ -266,7 +266,7 @@ class S3Helper:
            )
            return []

        if CI:
        if IS_CI:
            return self._upload_file_to_s3(
                bucket_name, full_fs_path, full_s3_path + "/" + file_path.name
            )

@ -331,7 +331,7 @@ class S3Helper:
        return result

    def url_if_exists(self, key: str, bucket: str = S3_BUILDS_BUCKET) -> str:
        if not CI:
        if not IS_CI:
            local_path = self.local_path(bucket, key)
            if local_path.exists():
                return local_path.as_uri()

@ -345,7 +345,7 @@ class S3Helper:

    @staticmethod
    def get_url(bucket: str, key: str) -> str:
        if CI:
        if IS_CI:
            return S3Helper.s3_url(bucket, key)
        return S3Helper.local_path(bucket, key).as_uri()
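The rename from CI to IS_CI removes the clash with the new ci_config.CI namespace; every S3Helper branch keeps one pattern: upload to S3 when running in CI, otherwise mirror the file locally so URLs still resolve. A minimal sketch of that switch — the bucket name and local root below are illustrative:

# Minimal sketch of the IS_CI upload switch used throughout S3Helper.
import os
import shutil
from pathlib import Path

IS_CI_SKETCH = bool(os.getenv("CI"))

def upload_build_file(file_path: Path, s3_path: str, bucket: str = "builds") -> str:
    if IS_CI_SKETCH:
        # in CI: a real S3 upload would happen here
        return f"s3://{bucket}/{s3_path}"
    # locally: mirror the layout on disk so reports still resolve
    local = Path("/tmp") / bucket / s3_path
    local.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy(file_path, local)
    return local.as_uri()
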
@ -6,12 +6,13 @@ import subprocess
import sys
from pathlib import Path

from build_download_helper import get_build_name_for_check, read_build_urls
from build_download_helper import read_build_urls
from docker_images_helper import DockerImage, get_docker_image, pull_image
from env_helper import REPORT_PATH, TEMP_PATH
from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults
from stopwatch import Stopwatch
from tee_popen import TeePopen
from ci_config import CI

IMAGE_NAME = "clickhouse/sqlancer-test"

@ -43,7 +44,7 @@ def main():

    docker_image = pull_image(get_docker_image(IMAGE_NAME))

    build_name = get_build_name_for_check(check_name)
    build_name = CI.get_required_build_name(check_name)
    urls = read_build_urls(build_name, reports_path)
    if not urls:
        raise ValueError("No build URLs found")

@ -6,12 +6,13 @@ import subprocess
import sys
from pathlib import Path

from build_download_helper import get_build_name_for_check, read_build_urls
from build_download_helper import read_build_urls
from docker_images_helper import get_docker_image, pull_image
from env_helper import REPORT_PATH, TEMP_PATH
from pr_info import PRInfo
from report import SUCCESS, JobReport, TestResult
from stopwatch import Stopwatch
from ci_config import CI

IMAGE_NAME = "clickhouse/sqltest"

@ -49,7 +50,7 @@ def main():

    docker_image = pull_image(get_docker_image(IMAGE_NAME))

    build_name = get_build_name_for_check(check_name)
    build_name = CI.get_required_build_name(check_name)
    print(build_name)
    urls = read_build_urls(build_name, reports_path)
    if not urls:
@ -13,7 +13,7 @@ from typing import List, Tuple, Union
import magic

from docker_images_helper import get_docker_image, pull_image
from env_helper import CI, REPO_COPY, TEMP_PATH
from env_helper import IS_CI, REPO_COPY, TEMP_PATH
from git_helper import GIT_PREFIX, git_runner
from pr_info import PRInfo
from report import ERROR, FAILURE, SUCCESS, JobReport, TestResults, read_test_results

@ -152,7 +152,7 @@ def main():
    run_cpp_check = True
    run_shell_check = True
    run_python_check = True
    if CI and pr_info.number > 0:
    if IS_CI and pr_info.number > 0:
        pr_info.fetch_changed_files()
        run_cpp_check = any(
            not (is_python(file) or is_shell(file)) for file in pr_info.changed_files
@ -5,12 +5,12 @@
import argparse
import sys

from ci_config import StatusNames
from commit_status_helper import get_commit, post_commit_status
from get_robot_token import get_best_robot_token
from github_helper import GitHub
from pr_info import PRInfo
from report import SUCCESS
from ci_config import CI


def parse_args() -> argparse.Namespace:

@ -75,7 +75,7 @@ def set_sync_status(gh, pr_info, sync_pr):
    if sync_pr.mergeable_state == "clean":
        print(f"Sync PR [{sync_pr.number}] is clean")
        post_commit_status(
            get_commit(gh, pr_info.sha), SUCCESS, "", "", StatusNames.SYNC
            get_commit(gh, pr_info.sha), SUCCESS, "", "", CI.StatusNames.SYNC
        )
    else:
        print(
@ -5,12 +5,12 @@ from pathlib import Path
import shutil
from typing import Dict, Set
import unittest
from ci_config import Build, JobNames
from s3_helper import S3Helper
from ci_cache import CiCache
from digest_helper import JOB_DIGEST_LEN
from commit_status_helper import CommitStatusData
from env_helper import S3_BUILDS_BUCKET, TEMP_PATH
from ci_config import CI


def _create_mock_digest_1(string):

@ -21,8 +21,8 @@ def _create_mock_digest_2(string):
    return md5((string + "+nonce").encode("utf-8")).hexdigest()[:JOB_DIGEST_LEN]


DIGESTS = {job: _create_mock_digest_1(job) for job in JobNames}
DIGESTS2 = {job: _create_mock_digest_2(job) for job in JobNames}
DIGESTS = {job: _create_mock_digest_1(job) for job in CI.JobNames}
DIGESTS2 = {job: _create_mock_digest_2(job) for job in CI.JobNames}


# pylint:disable=protected-access

@ -84,8 +84,10 @@ class TestCiCache(unittest.TestCase):
        NUM_BATCHES = 10

        DOCS_JOBS_NUM = 1
        assert len(set(job for job in JobNames)) == len(list(job for job in JobNames))
        NONDOCS_JOBS_NUM = len(set(job for job in JobNames)) - DOCS_JOBS_NUM
        assert len(set(job for job in CI.JobNames)) == len(
            list(job for job in CI.JobNames)
        )
        NONDOCS_JOBS_NUM = len(set(job for job in CI.JobNames)) - DOCS_JOBS_NUM

        PR_NUM = 123456
        status = CommitStatusData(

@ -97,13 +99,13 @@ class TestCiCache(unittest.TestCase):
        )

        ### add some pending statuses for two batches, non-release branch
        for job in JobNames:
        for job in CI.JobNames:
            ci_cache.push_pending(job, [0, 1, 2], NUM_BATCHES, release_branch=False)
            ci_cache_2.push_pending(job, [0, 1, 2], NUM_BATCHES, release_branch=False)

        ### add success status for batch 0, non-release branch
        batch = 0
        for job in JobNames:
        for job in CI.JobNames:
            ci_cache.push_successful(
                job, batch, NUM_BATCHES, status, release_branch=False
            )

@ -113,21 +115,17 @@ class TestCiCache(unittest.TestCase):

        ### add failed status for batch 2, non-release branch
        batch = 2
        for job in JobNames:
        for job in CI.JobNames:
            ci_cache.push_failed(job, batch, NUM_BATCHES, status, release_branch=False)
            ci_cache_2.push_failed(
                job, batch, NUM_BATCHES, status, release_branch=False
            )

        ### check all expected directories were created on the s3 mock
        expected_build_path_1 = f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_1(Build.PACKAGE_RELEASE)}"
        expected_docs_path_1 = (
            f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_1(JobNames.DOCS_CHECK)}"
        )
        expected_build_path_2 = f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_2(Build.PACKAGE_RELEASE)}"
        expected_docs_path_2 = (
            f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_2(JobNames.DOCS_CHECK)}"
        )
        expected_build_path_1 = f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_1(CI.BuildNames.PACKAGE_RELEASE)}"
        expected_docs_path_1 = f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_1(CI.JobNames.DOCS_CHECK)}"
        expected_build_path_2 = f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_2(CI.BuildNames.PACKAGE_RELEASE)}"
        expected_docs_path_2 = f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_2(CI.JobNames.DOCS_CHECK)}"
        self.assertCountEqual(
            list(s3_mock.files_on_s3_paths.keys()),
            [

@ -174,7 +172,7 @@ class TestCiCache(unittest.TestCase):
        )

        ### check statuses for all jobs in cache
        for job in JobNames:
        for job in CI.JobNames:
            self.assertEqual(
                ci_cache.is_successful(job, 0, NUM_BATCHES, release_branch=False), True
            )

@ -212,7 +210,7 @@ class TestCiCache(unittest.TestCase):
        assert status2 is None

        ### add some more pending statuses for two batches and for a release branch
        for job in JobNames:
        for job in CI.JobNames:
            ci_cache.push_pending(
                job, batches=[0, 1], num_batches=NUM_BATCHES, release_branch=True
            )

@ -226,7 +224,7 @@ class TestCiCache(unittest.TestCase):
            sha="deadbeaf2",
            pr_num=PR_NUM,
        )
        for job in JobNames:
        for job in CI.JobNames:
            ci_cache.push_successful(job, 0, NUM_BATCHES, status, release_branch=True)

        ### check the number of cache files is as expected

@ -249,7 +247,7 @@ class TestCiCache(unittest.TestCase):
        )

        ### check statuses
        for job in JobNames:
        for job in CI.JobNames:
            self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, False), True)
            self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, True), True)
            self.assertEqual(ci_cache.is_successful(job, 1, NUM_BATCHES, False), False)

@ -273,7 +271,7 @@ class TestCiCache(unittest.TestCase):

        ### create a new cache object and verify the same checks
        ci_cache = CiCache(s3_mock, DIGESTS)
        for job in JobNames:
        for job in CI.JobNames:
            self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, False), True)
            self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, True), True)
            self.assertEqual(ci_cache.is_successful(job, 1, NUM_BATCHES, False), False)
@ -1,30 +1,460 @@
#!/usr/bin/env python3

import unittest
from ci_config import CIStages, JobNames, CI_CONFIG, Runners
from ci_config import CI
import ci as CIPY
from ci_settings import CiSettings
from pr_info import PRInfo, EventType
from s3_helper import S3Helper
from ci_cache import CiCache
from ci_utils import normalize_string


_TEST_EVENT_JSON = {"dummy": "dummy"}

# pylint:disable=protected-access,union-attr


class TestCIConfig(unittest.TestCase):
    def test_runner_config(self):
        """check runner is provided w/o exception"""
        for job in JobNames:
            runner = CI_CONFIG.get_runner_type(job)
            self.assertIn(runner, Runners)
        for job in CI.JobNames:
            self.assertIn(CI.JOB_CONFIGS[job].runner_type, CI.Runners)
            if (
                job
                in (
                    CI.JobNames.STYLE_CHECK,
                    CI.JobNames.BUILD_CHECK,
                )
                or "jepsen" in job.lower()
            ):
                self.assertTrue(
                    "style" in CI.JOB_CONFIGS[job].runner_type,
                    f"Job [{job}] must have style-checker(-aarch64) runner",
                )
            elif "binary_" in job.lower() or "package_" in job.lower():
                self.assertTrue(
                    CI.JOB_CONFIGS[job].runner_type == CI.Runners.BUILDER,
                    f"Job [{job}] must have [{CI.Runners.BUILDER}] runner",
                )
            elif "aarch64" in job.lower():
                self.assertTrue(
                    "aarch" in CI.JOB_CONFIGS[job].runner_type,
                    f"Job [{job}] does not match runner [{CI.JOB_CONFIGS[job].runner_type}]",
                )
            else:
                self.assertTrue(
                    "aarch" not in CI.JOB_CONFIGS[job].runner_type,
                    f"Job [{job}] does not match runner [{CI.JOB_CONFIGS[job].runner_type}]",
                )

    def test_common_configs_applied_properly(self):
        for job in CI.JobNames:
            if CI.JOB_CONFIGS[job].job_name_keyword:
                self.assertTrue(
                    CI.JOB_CONFIGS[job].job_name_keyword.lower()
                    in normalize_string(job),
                    f"Job [{job}] apparently uses the wrong common config with job keyword [{CI.JOB_CONFIGS[job].job_name_keyword}]",
                )

    def test_required_checks(self):
        for job in CI.REQUIRED_CHECKS:
            if job in (CI.StatusNames.PR_CHECK, CI.StatusNames.SYNC):
                continue
            self.assertTrue(job in CI.JOB_CONFIGS, f"Job [{job}] not in job config")

    def test_builds_configs(self):
        """the build name in the build config must match the job name"""
        for job in CI.JobNames:
            self.assertTrue(job in CI.JOB_CONFIGS)
            self.assertTrue(CI.JOB_CONFIGS[job].runner_type in CI.Runners)
            if job in CI.BuildNames:
                self.assertTrue(CI.JOB_CONFIGS[job].build_config.name == job)
                self.assertTrue(CI.JOB_CONFIGS[job].required_builds is None)
            else:
                self.assertTrue(CI.JOB_CONFIGS[job].build_config is None)
                if "asan" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_ASAN,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "msan" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_MSAN,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "tsan" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_TSAN,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "ubsan" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_UBSAN,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "debug" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_DEBUG,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "release" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        in (
                            CI.BuildNames.PACKAGE_RELEASE,
                            CI.BuildNames.BINARY_RELEASE,
                        ),
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "coverage" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_RELEASE_COVERAGE,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "aarch" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_AARCH64,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "amd64" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_RELEASE,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "uzzer" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0] == CI.BuildNames.FUZZERS,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "Docker" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        in (
                            CI.BuildNames.PACKAGE_RELEASE,
                            CI.BuildNames.PACKAGE_AARCH64,
                        ),
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "SQLTest" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        == CI.BuildNames.PACKAGE_RELEASE,
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif "Jepsen" in job:
                    self.assertTrue(
                        CI.JOB_CONFIGS[job].required_builds[0]
                        in (
                            CI.BuildNames.PACKAGE_RELEASE,
                            CI.BuildNames.BINARY_RELEASE,
                        ),
                        f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig",
                    )
                elif job in (
                    CI.JobNames.STYLE_CHECK,
                    CI.JobNames.FAST_TEST,
                    CI.JobNames.BUILD_CHECK,
                    CI.JobNames.DOCS_CHECK,
                    CI.JobNames.BUGFIX_VALIDATE,
                ):
                    self.assertTrue(CI.JOB_CONFIGS[job].required_builds is None)
                else:
                    print(f"Job [{job}] required build not checked")

    def test_job_stage_config(self):
        """check runner is provided w/o exception"""
        for job in JobNames:
            stage = CI_CONFIG.get_job_ci_stage(job)
            if job in [
                JobNames.STYLE_CHECK,
                JobNames.FAST_TEST,
                JobNames.JEPSEN_KEEPER,
                JobNames.BUILD_CHECK,
                JobNames.BUILD_CHECK_SPECIAL,
            ]:
                assert (
                    stage == CIStages.NA
                ), "These jobs are not in CI stages, must be NA"
        """
        check runner is provided w/o exception
        """
        # check stages
        for job in CI.JobNames:
            if job in CI.BuildNames:
                self.assertTrue(
                    CI.get_job_ci_stage(job)
                    in (CI.WorkflowStages.BUILDS_1, CI.WorkflowStages.BUILDS_2)
                )
            else:
                assert stage != CIStages.NA, f"stage not found for [{job}]"
                self.assertIn(stage, CIStages)
                if job in (
                    CI.JobNames.STYLE_CHECK,
                    CI.JobNames.FAST_TEST,
                    CI.JobNames.JEPSEN_SERVER,
                    CI.JobNames.JEPSEN_KEEPER,
                    CI.JobNames.BUILD_CHECK,
                ):
                    self.assertEqual(
                        CI.get_job_ci_stage(job),
                        CI.WorkflowStages.NA,
                        msg=f"Stage for [{job}] is not correct",
                    )
                else:
                    self.assertTrue(
                        CI.get_job_ci_stage(job)
                        in (CI.WorkflowStages.TESTS_1, CI.WorkflowStages.TESTS_3),
                        msg=f"Stage for [{job}] is not correct",
                    )

    def test_build_jobs_configs(self):
        """
        check build jobs have a non-None build_config attribute
        check test jobs have a None build_config attribute
        """
        for job in CI.JobNames:
            if job in CI.BuildNames:
                self.assertTrue(
                    isinstance(CI.JOB_CONFIGS[job].build_config, CI.BuildConfig)
                )
            else:
                self.assertTrue(CI.JOB_CONFIGS[job].build_config is None)

    def test_ci_py_for_pull_request(self):
        """
        checks ci.py job configuration
        """
        settings = CiSettings()
        settings.no_ci_cache = True
        settings.ci_sets = [CI.Tags.CI_SET_BUILDS]
        settings.include_keywords = [
            "package",
            "integration",
            "upgrade",
            "clickHouse_build_check",
            "stateless",
        ]
        settings.exclude_keywords = ["asan", "aarch64"]
        pr_info = PRInfo(github_event=_TEST_EVENT_JSON)
        # make it pull request info
        pr_info.event_type = EventType.PULL_REQUEST
        pr_info.number = 12345
        assert pr_info.is_pr and not pr_info.is_release and not pr_info.is_master
        assert not pr_info.is_merge_queue
        ci_cache = CIPY._configure_jobs(
            S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True
        )
        actual_jobs_to_do = list(ci_cache.jobs_to_do)
        expected_jobs_to_do = []
        for set_ in settings.ci_sets:
            tag_config = CI.get_tag_config(set_)
            assert tag_config
            set_jobs = tag_config.run_jobs
            for job in set_jobs:
                if any(k in normalize_string(job) for k in settings.exclude_keywords):
                    continue
                expected_jobs_to_do.append(job)
        for job, config in CI.JOB_CONFIGS.items():
            if not any(
                keyword in normalize_string(job)
                for keyword in settings.include_keywords
            ):
                continue
            if any(
                keyword in normalize_string(job)
                for keyword in settings.exclude_keywords
            ):
                continue
            if config.random_bucket:
                continue
            if job not in expected_jobs_to_do:
                expected_jobs_to_do.append(job)

        random_buckets = []
        for job, config in ci_cache.jobs_to_do.items():
            if config.random_bucket:
                self.assertTrue(
                    config.random_bucket not in random_buckets,
                    "Only one job must be picked up from each random bucket",
                )
                random_buckets.append(config.random_bucket)
                actual_jobs_to_do.remove(job)

        self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do)

    def test_ci_py_for_pull_request_no_settings(self):
        """
        checks ci.py job configuration in a PR with empty ci_settings
        """
        settings = CiSettings()
        settings.no_ci_cache = True
        pr_info = PRInfo(github_event=_TEST_EVENT_JSON)
        # make it pull request info
        pr_info.event_type = EventType.PULL_REQUEST
        pr_info.number = 12345
        assert pr_info.is_pr and not pr_info.is_release and not pr_info.is_master
        assert not pr_info.is_merge_queue
        ci_cache = CIPY._configure_jobs(
            S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True
        )
        actual_jobs_to_do = list(ci_cache.jobs_to_do)
        expected_jobs_to_do = []
        for job, config in CI.JOB_CONFIGS.items():
            if config.random_bucket:
                continue
            if config.release_only:
                continue
            if config.run_by_label:
                continue
            expected_jobs_to_do.append(job)

        random_buckets = []
        for job, config in ci_cache.jobs_to_do.items():
            if config.random_bucket:
                self.assertTrue(
                    config.random_bucket not in random_buckets,
                    "Only one job must be picked up from each random bucket",
                )
                random_buckets.append(config.random_bucket)
                actual_jobs_to_do.remove(job)

        self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do)

    def test_ci_py_for_master(self):
        """
        checks ci.py job configuration
        """
        settings = CiSettings()
        settings.no_ci_cache = True
        pr_info = PRInfo(github_event=_TEST_EVENT_JSON)
        pr_info.event_type = EventType.PUSH
        assert pr_info.number == 0 and pr_info.is_release and not pr_info.is_merge_queue
        ci_cache = CIPY._configure_jobs(
            S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True
        )
        actual_jobs_to_do = list(ci_cache.jobs_to_do)
        expected_jobs_to_do = []
        for job, config in CI.JOB_CONFIGS.items():
            if config.pr_only:
                continue
            if config.run_by_label:
                continue
            if job in CI.MQ_JOBS:
                continue
            expected_jobs_to_do.append(job)
        self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do)

    def test_ci_py_for_merge_queue(self):
        """
        checks ci.py job configuration
        """
        settings = CiSettings()
        settings.no_ci_cache = True
        pr_info = PRInfo(github_event=_TEST_EVENT_JSON)
        # make it merge_queue
        pr_info.event_type = EventType.MERGE_QUEUE
        assert (
            pr_info.number == 0
            and pr_info.is_merge_queue
            and not pr_info.is_release
            and not pr_info.is_master
            and not pr_info.is_pr
        )
        ci_cache = CIPY._configure_jobs(
            S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True
        )
        actual_jobs_to_do = list(ci_cache.jobs_to_do)
        expected_jobs_to_do = [
            "Style check",
            "Fast test",
            "binary_release",
            "Unit tests (release)",
        ]
        self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do)

    def test_ci_py_await(self):
        """
        checks ci.py job configuration
        """
        settings = CiSettings()
        settings.no_ci_cache = True
        pr_info = PRInfo(github_event=_TEST_EVENT_JSON)
        pr_info.event_type = EventType.PUSH
        pr_info.number = 0
        assert pr_info.is_release and not pr_info.is_merge_queue
        ci_cache = CIPY._configure_jobs(
            S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True
        )
        self.assertTrue(not ci_cache.jobs_to_skip, "Must be no jobs in skip list")
        all_jobs_in_wf = list(ci_cache.jobs_to_do)
        assert not ci_cache.jobs_to_wait
        ci_cache.await_pending_jobs(is_release=pr_info.is_release, dry_run=True)
        assert not ci_cache.jobs_to_skip
        assert not ci_cache.jobs_to_wait

        # pretend there are pending jobs that we need to wait for
        ci_cache.jobs_to_wait = dict(ci_cache.jobs_to_do)
        for job, config in ci_cache.jobs_to_wait.items():
            assert not config.pending_batches
            assert config.batches
            config.pending_batches = list(config.batches)
        for job, config in ci_cache.jobs_to_wait.items():
            for batch in range(config.num_batches):
                record = CiCache.Record(
                    record_type=CiCache.RecordType.PENDING,
                    job_name=job,
                    job_digest=ci_cache.job_digests[job],
                    batch=batch,
                    num_batches=config.num_batches,
                    release_branch=True,
                )
                for record_t_, records_ in ci_cache.records.items():
                    if record_t_.value == CiCache.RecordType.PENDING.value:
                        records_[record.to_str_key()] = record

        def _test_await_for_batch(
            ci_cache: CiCache, record_type: CiCache.RecordType, batch: int
        ) -> None:
            assert ci_cache.jobs_to_wait
            for job_, config_ in ci_cache.jobs_to_wait.items():
                record = CiCache.Record(
                    record_type=record_type,
                    job_name=job_,
                    job_digest=ci_cache.job_digests[job_],
                    batch=batch,
                    num_batches=config_.num_batches,
                    release_branch=True,
                )
                for record_t_, records_ in ci_cache.records.items():
                    if record_t_.value == record_type.value:
                        records_[record.to_str_key()] = record
            # await
            ci_cache.await_pending_jobs(is_release=pr_info.is_release, dry_run=True)
            for _, config_ in ci_cache.jobs_to_wait.items():
                assert config_.pending_batches
                if (
                    record_type != CiCache.RecordType.PENDING
                    and batch < config_.num_batches
                ):
                    assert batch not in config_.pending_batches
                else:
                    assert batch in config_.pending_batches

            for _, config_ in ci_cache.jobs_to_do.items():
                # jobs to do must have batches to run before/after await
                # if the list is empty after await, the job has apparently not been removed after await
                assert config_.batches

        _test_await_for_batch(ci_cache, CiCache.RecordType.SUCCESSFUL, 0)
        # check that all one-batch jobs are in jobs_to_skip
        for job in all_jobs_in_wf:
            config = CI.JOB_CONFIGS[job]
            if config.num_batches == 1:
                self.assertTrue(job in ci_cache.jobs_to_skip)
                self.assertTrue(job not in ci_cache.jobs_to_do)
            else:
                self.assertTrue(job not in ci_cache.jobs_to_skip)
                self.assertTrue(job in ci_cache.jobs_to_do)

        _test_await_for_batch(ci_cache, CiCache.RecordType.FAILED, 1)
        _test_await_for_batch(ci_cache, CiCache.RecordType.SUCCESSFUL, 2)

        self.assertTrue(len(ci_cache.jobs_to_skip) > 0)
        self.assertTrue(len(ci_cache.jobs_to_do) > 0)
        self.assertCountEqual(
            list(ci_cache.jobs_to_do) + ci_cache.jobs_to_skip, all_jobs_in_wf
        )
@ -4,7 +4,7 @@

import unittest
from ci_settings import CiSettings
from ci_config import JobConfig
from ci_config import CI

_TEST_BODY_1 = """
#### Run only:

@ -64,8 +64,8 @@ _TEST_JOB_LIST = [
    "fuzzers",
    "Docker server image",
    "Docker keeper image",
    "Install packages (amd64)",
    "Install packages (arm64)",
    "Install packages (release)",
    "Install packages (aarch64)",
    "Stateless tests (debug)",
    "Stateless tests (release)",
    "Stateless tests (coverage)",

@ -120,15 +120,15 @@ _TEST_JOB_LIST = [
    "AST fuzzer (ubsan)",
    "ClickHouse Keeper Jepsen",
    "ClickHouse Server Jepsen",
    "Performance Comparison",
    "Performance Comparison Aarch64",
    "Performance Comparison (release)",
    "Performance Comparison (aarch64)",
    "Sqllogic test (release)",
    "SQLancer (release)",
    "SQLancer (debug)",
    "SQLTest",
    "Compatibility check (amd64)",
    "Compatibility check (release)",
    "Compatibility check (aarch64)",
    "ClickBench (amd64)",
    "ClickBench (release)",
    "ClickBench (aarch64)",
    "libFuzzer tests",
    "ClickHouse build check",

@ -166,7 +166,10 @@ class TestCIOptions(unittest.TestCase):
            ["tsan", "foobar", "aarch64", "analyzer", "s3_storage", "coverage"],
        )

        jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST}
        jobs_configs = {
            job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
            for job in _TEST_JOB_LIST
        }
        jobs_configs[
            "fuzzers"
        ].run_by_label = (

@ -210,7 +213,10 @@ class TestCIOptions(unittest.TestCase):
        )

    def test_options_applied_2(self):
        jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST_2}
        jobs_configs = {
            job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
            for job in _TEST_JOB_LIST_2
        }
        jobs_configs["Style check"].release_only = True
        jobs_configs["Fast test"].pr_only = True
        jobs_configs["fuzzers"].run_by_label = "TEST_LABEL"

@ -252,7 +258,10 @@ class TestCIOptions(unittest.TestCase):
    def test_options_applied_3(self):
        ci_settings = CiSettings()
        ci_settings.include_keywords = ["Style"]
        jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST_2}
        jobs_configs = {
            job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
            for job in _TEST_JOB_LIST_2
        }
        jobs_configs["Style check"].release_only = True
        jobs_configs["Fast test"].pr_only = True
        # no settings are set

@ -296,7 +305,10 @@ class TestCIOptions(unittest.TestCase):
        )
        self.assertCountEqual(ci_options.include_keywords, ["analyzer"])
        self.assertIsNone(ci_options.exclude_keywords)
        jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST}
        jobs_configs = {
            job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
            for job in _TEST_JOB_LIST
        }
        jobs_configs[
            "fuzzers"
        ].run_by_label = "TEST_LABEL"  # check "fuzzers" does not appear in the result
@ -11,13 +11,15 @@ GIT_ROOT=${GIT_ROOT:-.}
CONFIG="$GIT_ROOT/tests/ci/.mypy.ini"
DIRS=("$GIT_ROOT/tests/ci/" "$GIT_ROOT/tests/ci/"*/)
tmp=$(mktemp)

for dir in "${DIRS[@]}"; do
    if ! compgen -G "$dir"/*.py > /dev/null; then
        continue
    fi
    if ! mypy --config-file="$CONFIG" --sqlite-cache "$dir"/*.py > "$tmp" 2>&1; then
    if ! mypy --config-file="$CONFIG" --sqlite-cache $(find "$dir" -maxdepth 1 -name "*.py" | grep -v "test_") > "$tmp" 2>&1; then
        echo "Errors while processing $dir":
        cat "$tmp"
    fi
done

rm -rf "$tmp"

@ -10,6 +10,7 @@ function xargs-pylint {
    xargs -P "$(nproc)" -n "$1" pylint --rcfile="$ROOT_PATH/pyproject.toml" --persistent=no --score=n
}

find "$ROOT_PATH/tests" -maxdepth 2 -type f -exec file -F' ' --mime-type {} + | xargs-pylint 50
# exclude ci unittest scripts from check: test_*
find "$ROOT_PATH/tests" -maxdepth 2 -type f -exec file -F' ' --mime-type {} + | grep -v "/test_" | xargs-pylint 50
# Beware, the lambdas are checked there. All of them contain `app`, and it causes brain-cucumber-zalgo
find "$ROOT_PATH/tests/ci" -mindepth 2 -type f -exec file -F' ' --mime-type {} + | xargs-pylint 1