fixing typos and var names

This commit is contained in:
parent d7f95ddfcf
commit 22573361de
@@ -71,12 +71,12 @@ class PendingState:
 class CiCache:
 """
 CI cache is a bunch of records. Record is a file stored under special location on s3.
-The file name has following format
+The file name has the following format

 <RECORD_TYPE>_[<ATTRIBUTES>]--<JOB_NAME>_<JOB_DIGEST>_<BATCH>_<NUM_BATCHES>.ci

 RECORD_TYPE:
-SUCCESSFUL - for successfuly finished jobs
+SUCCESSFUL - for successfully finished jobs
 PENDING - for pending jobs

 ATTRIBUTES:
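For illustration, a minimal sketch (not the actual CiCache code; the helper name and the sample values are assumptions) of how a record file name following the format in this docstring could be composed:

def make_record_name(
    record_type: str,    # e.g. "successful" or "pending"
    attributes: str,     # optional attribute string, e.g. "release"; may be empty
    job_name: str,
    job_digest: str,
    batch: int,
    num_batches: int,
) -> str:
    # mirror the documented layout: <RECORD_TYPE>_[<ATTRIBUTES>]--<JOB_NAME>_<JOB_DIGEST>_<BATCH>_<NUM_BATCHES>.ci
    prefix = f"{record_type}_[{attributes}]" if attributes else record_type
    return f"{prefix}--{job_name}_{job_digest}_{batch}_{num_batches}.ci"

print(make_record_name("successful", "release", "Stateless_tests_release", "digest123", 0, 4))
# successful_[release]--Stateless_tests_release_digest123_0_4.ci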
@@ -508,7 +508,7 @@ class CiCache:
 self, job: str, batch: int, num_batches: int, release_branch: bool
 ) -> bool:
 """
-checks if a given job have already been done successfuly
+checks if a given job have already been done successfully
 """
 return self.exist(
 self.RecordType.SUCCESSFUL, job, batch, num_batches, release_branch
@@ -749,7 +749,7 @@ class CiOptions:
 # list of specified jobs to run
 ci_jobs: Optional[List[str]] = None

-# btaches to run for all multi-batch jobs
+# batches to run for all multi-batch jobs
 job_batches: Optional[List[int]] = None

 do_not_test: bool = False
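A hedged illustration of what these two options control: ci_jobs picks specific jobs, while job_batches narrows which batches of a multi-batch job run. The helper below is an assumption made for the example, not code from ci.py:

from typing import List, Optional

def batches_to_run(num_batches: int, job_batches: Optional[List[int]]) -> List[int]:
    all_batches = list(range(num_batches))
    if not job_batches:
        return all_batches  # no restriction requested
    return [b for b in all_batches if b in job_batches]

print(batches_to_run(4, None))    # [0, 1, 2, 3]
print(batches_to_run(4, [1, 3]))  # [1, 3]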
@@ -903,7 +903,7 @@ class CiOptions:
 if self.ci_sets:
 for tag in self.ci_sets:
 label_config = CI_CONFIG.get_label_config(tag)
-assert label_config, f"Unknonwn tag [{tag}]"
+assert label_config, f"Unknown tag [{tag}]"
 print(
 f"NOTE: CI Set's tag: [{tag}], add jobs: [{label_config.run_jobs}]"
 )
@@ -953,7 +953,7 @@ class CiOptions:
 jobs_params[job] = {
 "batches": list(range(num_batches)),
 "num_batches": num_batches,
-"run_if_ci_option_include_set": job_config.run_by_ci_option
+"run_by_ci_option": job_config.run_by_ci_option
 and pr_info.is_pr,
 }

@@ -969,7 +969,7 @@ class CiOptions:
 for job in jobs_to_do[:]:
 job_param = jobs_params[job]
 if (
-job_param["run_if_ci_option_include_set"]
+job_param["run_by_ci_option"]
 and job not in jobs_to_do_requested
 ):
 print(
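The renamed key feeds the filter above: a job whose config marks it as opt-in via "run_by_ci_option" is dropped from jobs_to_do unless it was explicitly requested. A minimal, self-contained sketch of that behavior (job names and data are invented for the example):

jobs_to_do = ["Style check", "Stateless tests (azure, asan)"]
jobs_to_do_requested = ["Style check"]
jobs_params = {
    "Style check": {"run_by_ci_option": False},
    "Stateless tests (azure, asan)": {"run_by_ci_option": True},
}

for job in jobs_to_do[:]:  # iterate over a copy so removal is safe
    if jobs_params[job]["run_by_ci_option"] and job not in jobs_to_do_requested:
        jobs_to_do.remove(job)

print(jobs_to_do)  # ['Style check']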
@@ -1010,7 +1010,7 @@ def parse_args(parser: argparse.ArgumentParser) -> argparse.Namespace:
 parser.add_argument(
 "--pre",
 action="store_true",
-help="Action that executes prerequesetes for the job provided in --job-name",
+help="Action that executes prerequisites for the job provided in --job-name",
 )
 parser.add_argument(
 "--run",
@@ -1080,7 +1080,7 @@ def parse_args(parser: argparse.ArgumentParser) -> argparse.Namespace:
 "--skip-jobs",
 action="store_true",
 default=False,
-help="skip fetching data about job runs, used in --configure action (for debugging and nigthly ci)",
+help="skip fetching data about job runs, used in --configure action (for debugging and nightly ci)",
 )
 parser.add_argument(
 "--force",
@@ -1298,7 +1298,7 @@ def _configure_docker_jobs(docker_digest_or_latest: bool) -> Dict:
 missing_amd64 = []
 missing_aarch64 = []
 if not docker_digest_or_latest:
-# look for missing arm and amd images only among missing multiarch manifests @missing_multi_dict
+# look for missing arm and amd images only among missing multi-arch manifests @missing_multi_dict
 # to avoid extra dockerhub api calls
 missing_amd64 = list(
 check_missing_images_on_dockerhub(missing_multi_dict, "amd64")
@@ -1396,7 +1396,7 @@ def _configure_jobs(
 ):
 continue

-# fill job randomization buckets (for jobs with configured @random_bucket property))
+# fill job randomization buckets (for jobs with configured @random_bucket property)
 if job_config.random_bucket:
 if not job_config.random_bucket in randomization_buckets:
 randomization_buckets[job_config.random_bucket] = set()
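The comment fixed here refers to randomization buckets: jobs that share a random_bucket value are grouped together. A rough sketch of that grouping; the selection step at the end is an assumption about how a bucket might be used, not the actual logic in _configure_jobs:

import random
from typing import Dict, Set

job_buckets = {
    "Stress test (asan)": "stress_with_sanitizer",
    "Stress test (msan)": "stress_with_sanitizer",
    "Upgrade check (asan)": "upgrade_with_sanitizer",
}

randomization_buckets: Dict[str, Set[str]] = {}
for job, bucket in job_buckets.items():
    if bucket not in randomization_buckets:
        randomization_buckets[bucket] = set()
    randomization_buckets[bucket].add(job)

# assumption for the example: keep one randomly chosen job per bucket
picked = {random.choice(sorted(jobs)) for jobs in randomization_buckets.values()}
print(picked)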
@@ -1445,7 +1445,7 @@ def _configure_jobs(
 jobs_params[job] = {
 "batches": batches_to_do,
 "num_batches": num_batches,
-"run_if_ci_option_include_set": job_config.run_by_ci_option
+"run_by_ci_option": job_config.run_by_ci_option
 and pr_info.is_pr,
 }
 elif add_to_skip:
@@ -1490,8 +1490,8 @@ def _configure_jobs(
 def _generate_ci_stage_config(jobs_data: Dict[str, Any]) -> Dict[str, Dict[str, Any]]:
 """
 populates GH Actions' workflow with real jobs
-"Builds_1": [{"job_name": NAME, "runner_type": RUNER_TYPE}]
-"Tests_1": [{"job_name": NAME, "runner_type": RUNER_TYPE}]
+"Builds_1": [{"job_name": NAME, "runner_type": RUNNER_TYPE}]
+"Tests_1": [{"job_name": NAME, "runner_type": RUNNER_TYPE}]
 ...
 """
 result = {} # type: Dict[str, Any]
@@ -1582,7 +1582,7 @@ def _fetch_commit_tokens(message: str, pr_info: PRInfo) -> List[str]:
 for match in matches
 if match in CILabels or match.startswith("job_") or match.startswith("batch_")
 ]
-print(f"CI modifyers from commit message: [{res}]")
+print(f"CI modifiers from commit message: [{res}]")
 res_2 = []
 if pr_info.is_pr:
 matches = [match[-1] for match in re.findall(pattern, pr_info.body)]
@@ -1593,7 +1593,7 @@ def _fetch_commit_tokens(message: str, pr_info: PRInfo) -> List[str]:
 or match.startswith("job_")
 or match.startswith("batch_")
 ]
-print(f"CI modifyers from PR body: [{res_2}]")
+print(f"CI modifiers from PR body: [{res_2}]")
 return list(set(res + res_2))

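Both corrected print statements belong to _fetch_commit_tokens, which collects CI modifiers from the commit message and the PR body and keeps only known labels plus job_*/batch_* tokens. A small sketch of that filtering; the regex and the label set here are stand-ins, not the real pattern or CILabels:

import re

known_labels = {"ci_set_integration", "do_not_test"}  # hypothetical subset of CILabels
pattern = r"#(\w+)"  # assumed token syntax for the example

message = "fix crash #job_Style_check #batch_1 #something_else #do_not_test"
matches = re.findall(pattern, message)
res = [
    m
    for m in matches
    if m in known_labels or m.startswith("job_") or m.startswith("batch_")
]
print(f"CI modifiers from commit message: [{res}]")
# CI modifiers from commit message: [['job_Style_check', 'batch_1', 'do_not_test']]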
@@ -1659,7 +1659,7 @@ def _upload_build_artifacts(
 report_url = ci_cache.upload_build_report(build_result)
 print(f"Report file has been uploaded to [{report_url}]")

-# Upload head master binaries
+# Upload master head's binaries
 static_bin_name = CI_CONFIG.build_config[build_name].static_binary_name
 if pr_info.is_master and static_bin_name:
 # Full binary with debug info:
@@ -50,9 +50,9 @@ class CILabels(metaclass=WithIter):
 CI_SET_ARM = "ci_set_arm"
 CI_SET_INTEGRATION = "ci_set_integration"
 CI_SET_OLD_ANALYZER = "ci_set_old_analyzer"
-CI_SET_STATLESS = "ci_set_stateless"
+CI_SET_STATELESS = "ci_set_stateless"
 CI_SET_STATEFUL = "ci_set_stateful"
-CI_SET_STATLESS_ASAN = "ci_set_stateless_asan"
+CI_SET_STATELESS_ASAN = "ci_set_stateless_asan"
 CI_SET_STATEFUL_ASAN = "ci_set_stateful_asan"

 libFuzzer = "libFuzzer"
@@ -203,7 +203,7 @@ class DigestConfig:
 include_paths: List[Union[str, Path]] = field(default_factory=list)
 # file suffixes to exclude from digest
 exclude_files: List[str] = field(default_factory=list)
-# directories to exlude from digest
+# directories to exclude from digest
 exclude_dirs: List[Union[str, Path]] = field(default_factory=list)
 # docker names to include into digest
 docker: List[str] = field(default_factory=list)
@@ -214,7 +214,7 @@ class DigestConfig:
 @dataclass
 class LabelConfig:
 """
-configures different CI scenarious per GH label
+configures different CI scenarios per GH label
 """

 run_jobs: Iterable[str] = frozenset()
@@ -228,7 +228,7 @@ class JobConfig:

 # configures digest calculation for the job
 digest: DigestConfig = field(default_factory=DigestConfig)
-# will be triggered for the job if omited in CI workflow yml
+# will be triggered for the job if omitted in CI workflow yml
 run_command: str = ""
 # job timeout, seconds
 timeout: Optional[int] = None
@@ -239,7 +239,7 @@ class JobConfig:
 # to run always regardless of the job digest or/and label
 run_always: bool = False
 # if the job needs to be run on the release branch, including master (e.g. building packages, docker server).
-# NOTE: Subsequent runs on the same branch with the similar digest are still considered skippable.
+# NOTE: Subsequent runs on the same branch with the similar digest are still considered skip-able.
 required_on_release_branch: bool = False
 # job is for pr workflow only
 pr_only: bool = False
@@ -467,7 +467,7 @@ compatibility_test_common_params = {
 "digest": compatibility_check_digest,
 "run_command": "compatibility_check.py",
 }
-statless_test_common_params = {
+stateless_test_common_params = {
 "digest": stateless_check_digest,
 "run_command": 'functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT',
 "timeout": 10800,
@@ -661,7 +661,7 @@ class CIConfig:
 # crosscompile - no arm required
 pass
 else:
-# switch to aarch64 runnner
+# switch to aarch64 runner
 result += "-aarch64"

 return result
@@ -708,7 +708,7 @@ class CIConfig:
 break
 assert (
 res
-), f"Error: Experimantal feature... Invlid request or not supported job [{check_name}]"
+), f"Error: Experimental feature... Invalid request or not supported job [{check_name}]"
 return res

 def get_digest_config(self, check_name: str) -> DigestConfig:
@@ -811,16 +811,16 @@ class CIConfig:
 f"The following names of the build report '{build_report_name}' "
 f"are missed in build_config: {missed_names}",
 )
-# And finally, all of tests' requirements must be in the builds
+# And finally, all tests' requirements must be in the builds
 for test_name, test_config in self.test_configs.items():
 if test_config.required_build not in self.build_config.keys():
 logging.error(
-"The requierment '%s' for '%s' is not found in builds",
+"The requirement '%s' for '%s' is not found in builds",
 test_config,
 test_name,
 )
 errors.append(
-f"The requierment '{test_config}' for "
+f"The requirement '{test_config}' for "
 f"'{test_name}' is not found in builds"
 )

@@ -861,7 +861,7 @@ CI_CONFIG = CIConfig(
 JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER,
 ]
 ),
-CILabels.CI_SET_STATLESS: LabelConfig(
+CILabels.CI_SET_STATELESS: LabelConfig(
 run_jobs=[
 JobNames.STYLE_CHECK,
 JobNames.FAST_TEST,
@@ -869,7 +869,7 @@ CI_CONFIG = CIConfig(
 JobNames.STATELESS_TEST_RELEASE,
 ]
 ),
-CILabels.CI_SET_STATLESS_ASAN: LabelConfig(
+CILabels.CI_SET_STATELESS_ASAN: LabelConfig(
 run_jobs=[
 JobNames.STYLE_CHECK,
 JobNames.FAST_TEST,
@@ -1155,68 +1155,63 @@ CI_CONFIG = CIConfig(
 Build.PACKAGE_DEBUG, job_config=JobConfig(**stateful_test_common_params) # type: ignore
 ),
 JobNames.STATEFUL_TEST_PARALLEL_REPL_ASAN: TestConfig(
-Build.PACKAGE_ASAN,
-job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
+Build.PACKAGE_ASAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
 ),
 JobNames.STATEFUL_TEST_PARALLEL_REPL_MSAN: TestConfig(
-Build.PACKAGE_MSAN,
-job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
+Build.PACKAGE_MSAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
 ),
 JobNames.STATEFUL_TEST_PARALLEL_REPL_UBSAN: TestConfig(
-Build.PACKAGE_UBSAN,
-job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
+Build.PACKAGE_UBSAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
 ),
 JobNames.STATEFUL_TEST_PARALLEL_REPL_TSAN: TestConfig(
-Build.PACKAGE_TSAN,
-job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
+Build.PACKAGE_TSAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore
 ),
 # End stateful tests for parallel replicas
 JobNames.STATELESS_TEST_ASAN: TestConfig(
 Build.PACKAGE_ASAN,
-job_config=JobConfig(num_batches=4, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=4, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_TSAN: TestConfig(
 Build.PACKAGE_TSAN,
-job_config=JobConfig(num_batches=5, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=5, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_MSAN: TestConfig(
 Build.PACKAGE_MSAN,
-job_config=JobConfig(num_batches=6, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=6, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_UBSAN: TestConfig(
 Build.PACKAGE_UBSAN,
-job_config=JobConfig(num_batches=2, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=2, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_DEBUG: TestConfig(
 Build.PACKAGE_DEBUG,
-job_config=JobConfig(num_batches=5, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=5, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_RELEASE: TestConfig(
-Build.PACKAGE_RELEASE, job_config=JobConfig(**statless_test_common_params) # type: ignore
+Build.PACKAGE_RELEASE, job_config=JobConfig(**stateless_test_common_params) # type: ignore
 ),
 JobNames.STATELESS_TEST_RELEASE_COVERAGE: TestConfig(
 Build.PACKAGE_RELEASE_COVERAGE,
-job_config=JobConfig(num_batches=6, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=6, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_AARCH64: TestConfig(
-Build.PACKAGE_AARCH64, job_config=JobConfig(**statless_test_common_params) # type: ignore
+Build.PACKAGE_AARCH64, job_config=JobConfig(**stateless_test_common_params) # type: ignore
 ),
 JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE: TestConfig(
 Build.PACKAGE_RELEASE,
-job_config=JobConfig(num_batches=4, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=4, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_S3_DEBUG: TestConfig(
 Build.PACKAGE_DEBUG,
-job_config=JobConfig(num_batches=6, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=6, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STATELESS_TEST_AZURE_ASAN: TestConfig(
 Build.PACKAGE_ASAN,
-job_config=JobConfig(num_batches=4, **statless_test_common_params, release_only=True,
-run_by_ci_option=True), # type: ignore
+job_config=JobConfig(num_batches=4, **stateless_test_common_params, release_only=True, run_by_ci_option=True), # type: ignore
 ),
 JobNames.STATELESS_TEST_S3_TSAN: TestConfig(
 Build.PACKAGE_TSAN,
-job_config=JobConfig(num_batches=5, **statless_test_common_params), # type: ignore
+job_config=JobConfig(num_batches=5, **stateless_test_common_params), # type: ignore
 ),
 JobNames.STRESS_TEST_DEBUG: TestConfig(
 Build.PACKAGE_DEBUG, job_config=JobConfig(**stress_test_common_params) # type: ignore
@@ -1225,39 +1220,28 @@ CI_CONFIG = CIConfig(
 Build.PACKAGE_TSAN, job_config=JobConfig(**stress_test_common_params) # type: ignore
 ),
 JobNames.STRESS_TEST_ASAN: TestConfig(
-Build.PACKAGE_ASAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params)
-# type: ignore
+Build.PACKAGE_ASAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore
 ),
 JobNames.STRESS_TEST_UBSAN: TestConfig(
-Build.PACKAGE_UBSAN,
-job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore
+Build.PACKAGE_UBSAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore
 ),
 JobNames.STRESS_TEST_MSAN: TestConfig(
-Build.PACKAGE_MSAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params)
-# type: ignore
+Build.PACKAGE_MSAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore
 ),
 JobNames.UPGRADE_TEST_ASAN: TestConfig(
-Build.PACKAGE_ASAN,
-job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params)
-# type: ignore
+Build.PACKAGE_ASAN, job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params) # type: ignore
 ),
 JobNames.STRESS_TEST_AZURE_TSAN: TestConfig(
-Build.PACKAGE_TSAN,
-job_config=JobConfig(**stress_test_common_params, release_only=True, run_by_ci_option=True) # type: ignore
+Build.PACKAGE_TSAN, job_config=JobConfig(**stress_test_common_params, release_only=True, run_by_ci_option=True) # type: ignore
 ),
 JobNames.STRESS_TEST_AZURE_MSAN: TestConfig(
-Build.PACKAGE_MSAN,
-job_config=JobConfig(**stress_test_common_params, release_only=True, run_by_ci_option=True) # type: ignore
+Build.PACKAGE_MSAN, job_config=JobConfig(**stress_test_common_params, release_only=True, run_by_ci_option=True) # type: ignore
 ),
 JobNames.UPGRADE_TEST_TSAN: TestConfig(
-Build.PACKAGE_TSAN,
-job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params)
-# type: ignore
+Build.PACKAGE_TSAN, job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params) # type: ignore
 ),
 JobNames.UPGRADE_TEST_MSAN: TestConfig(
-Build.PACKAGE_MSAN,
-job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params)
-# type: ignore
+Build.PACKAGE_MSAN, job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params) # type: ignore
 ),
 JobNames.UPGRADE_TEST_DEBUG: TestConfig(
 Build.PACKAGE_DEBUG, job_config=JobConfig(pr_only=True, **upgrade_test_common_params) # type: ignore
@@ -1276,7 +1260,8 @@ CI_CONFIG = CIConfig(
 ),
 JobNames.INTEGRATION_TEST_ARM: TestConfig(
 Build.PACKAGE_AARCH64,
-job_config=JobConfig(num_batches=5, **integration_test_common_params), # type: ignore
+# add [run_by_label="test arm"] to not run in regular pr workflow by default
+job_config=JobConfig(num_batches=6, **integration_test_common_params, run_by_label="test arm"), # type: ignore
 ),
 JobNames.INTEGRATION_TEST: TestConfig(
 Build.PACKAGE_RELEASE,
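The new run_by_label="test arm" argument gates the ARM integration job behind a GitHub label. A minimal sketch of that gating idea; the dataclass, helper, and job name below are assumptions for illustration, not the real scheduler:

from dataclasses import dataclass
from typing import Optional, Set

@dataclass
class Job:
    name: str
    run_by_label: Optional[str] = None

def should_schedule(job: Job, pr_labels: Set[str]) -> bool:
    # jobs without a gating label run by default; labelled jobs are opt-in
    return job.run_by_label is None or job.run_by_label in pr_labels

arm_integration = Job("Integration tests (aarch64)", run_by_label="test arm")
print(should_schedule(arm_integration, set()))          # False: skipped in a regular PR
print(should_schedule(arm_integration, {"test arm"}))   # True: the label opts the job in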
@@ -1330,7 +1315,7 @@ CI_CONFIG = CIConfig(
 JobNames.STATELESS_TEST_FLAKY_ASAN: TestConfig(
 # replace to non-default
 Build.PACKAGE_ASAN,
-job_config=JobConfig(pr_only=True, **{**statless_test_common_params, "timeout": 3600}), # type: ignore
+job_config=JobConfig(pr_only=True, **{**stateless_test_common_params, "timeout": 3600}), # type: ignore
 ),
 JobNames.JEPSEN_KEEPER: TestConfig(
 Build.BINARY_RELEASE,
@@ -1350,8 +1335,7 @@ CI_CONFIG = CIConfig(
 ),
 JobNames.PERFORMANCE_TEST_ARM64: TestConfig(
 Build.PACKAGE_AARCH64,
-job_config=JobConfig(num_batches=4, run_by_label="pr-performance", **perf_test_common_params),
-# type: ignore
+job_config=JobConfig(num_batches=4, run_by_label="pr-performance", **perf_test_common_params), # type: ignore
 ),
 JobNames.SQLANCER: TestConfig(
 Build.PACKAGE_RELEASE, job_config=sqllancer_test_common_params
@@ -1381,6 +1365,7 @@ CI_CONFIG = CIConfig(
 )
 CI_CONFIG.validate()

+
 # checks required by Mergeable Check
 REQUIRED_CHECKS = [
 "PR Check",
@@ -1479,7 +1464,7 @@ CHECK_DESCRIPTIONS = [
 "Checks if new added or modified tests are flaky by running them repeatedly, "
 "in parallel, with more randomization. Functional tests are run 100 times "
 "with address sanitizer, and additional randomization of thread scheduling. "
-"Integrational tests are run up to 10 times. If at least once a new test has "
+"Integration tests are run up to 10 times. If at least once a new test has "
 "failed, or was too long, this check will be red. We don't allow flaky tests, "
 'read <a href="https://clickhouse.com/blog/decorating-a-christmas-tree-with-'
 'the-help-of-flaky-tests/">the doc</a>',
@@ -1569,7 +1554,7 @@ CHECK_DESCRIPTIONS = [
 lambda x: x.startswith("ClickBench"),
 ),
 CheckDescription(
-"Falback for unknown",
+"Fallback for unknown",
 "There's no description for the check yet, please add it to "
 "tests/ci/ci_config.py:CHECK_DESCRIPTIONS",
 lambda x: True,
@@ -161,7 +161,7 @@ class TestCIOptions(unittest.TestCase):
 "Stateless tests (azure, asan)": {
 "batches": list(range(3)),
 "num_batches": 3,
-"run_if_ci_option_include_set": True,
+"run_by_ci_option": True,
 }
 }
 jobs_to_do, jobs_to_skip, job_params = ci_options.apply(
@@ -226,10 +226,10 @@ class TestCIOptions(unittest.TestCase):
 job_params[job] = {
 "batches": list(range(3)),
 "num_batches": 3,
-"run_if_ci_option_include_set": "azure" in job,
+"run_by_ci_option": "azure" in job,
 }
 else:
-job_params[job] = {"run_if_ci_option_include_set": False}
+job_params[job] = {"run_by_ci_option": False}

 jobs_to_do, jobs_to_skip, job_params = ci_options.apply(
 jobs_to_do, jobs_to_skip, job_params, PRInfo()