),"Provide S3_ARTIFACT_PATH setting in any .py file in ./ci/settings/* to be able to use s3 for artifacts"
# Per-job validation:
#   - a job that consumes an S3-backed artifact must not run on a
#     GitHub-hosted runner (those runners have no S3 access);
#   - allow_merge_on_failure is only meaningful when the workflow
#     publishes a merge-ready status.
for job in workflow.jobs:
    if job.requires and workflow.artifacts:
        for require in job.requires:
            if (
                require in workflow.artifacts
                and workflow.artifacts[require].is_s3_artifact()
            ):
                assert not any(
                    [r in GHRunners for r in job.runs_on]
                ), f"GH runners [{job.name}:{job.runs_on}] must not be used with S3 as artifact storage"
    if job.allow_merge_on_failure:
        assert (
            workflow.enable_merge_ready_status
        ), f"Job property allow_merge_on_failure must be used only with enabled workflow.enable_merge_ready_status, workflow [{workflow.name}], job [{job.name}]"
# Caching needs a runner label for the workflow-config job and an S3
# path under which cache records are stored.
if workflow.enable_cache:
    assert (
        Settings.CI_CONFIG_RUNS_ON
    ), f"Runner label to run workflow config job must be provided via CACHE_CONFIG_RUNS_ON setting if enable_cache=True, workflow [{workflow.name}]"
    assert (
        Settings.CACHE_S3_PATH
    ), f"CACHE_S3_PATH Setting must be defined if enable_cache=True, workflow [{workflow.name}]"
# Docker image builds need their own runner label. Reported via
# cls.evaluate_check rather than a bare assert.
if workflow.dockers:
    cls.evaluate_check(
        Settings.DOCKER_BUILD_RUNS_ON,
        # no placeholders in this message, so a plain string (was an f-string)
        "DOCKER_BUILD_RUNS_ON settings must be defined if workflow has dockers",
        workflow_name=workflow.name,
    )
# Publishing an HTML report requires the report's S3 path and a
# bucket -> HTTP endpoint mapping that covers the report's bucket
# (first path component of HTML_S3_PATH).
if workflow.enable_report:
    assert (
        Settings.HTML_S3_PATH
    ), f"HTML_S3_PATH Setting must be defined if enable_html=True, workflow [{workflow.name}]"
    assert (
        Settings.S3_BUCKET_TO_HTTP_ENDPOINT
    ), f"S3_BUCKET_TO_HTTP_ENDPOINT Setting must be defined if enable_html=True, workflow [{workflow.name}]"
    assert (
        Settings.HTML_S3_PATH.split("/")[0]
        in Settings.S3_BUCKET_TO_HTTP_ENDPOINT
    ), f"S3_BUCKET_TO_HTTP_ENDPOINT Setting must include bucket name [{Settings.HTML_S3_PATH}] from HTML_S3_PATH, workflow [{workflow.name}]"
# With caching enabled, every artifact must be S3-backed so results can
# be reused across runs.
# NOTE(review): the original formatting is lost; this is treated as a
# top-level `if`, not nested under the enable_report branch — confirm
# against the upstream source.
if workflow.enable_cache:
    for artifact in workflow.artifacts or []:
        assert (
            artifact.is_s3_artifact()
        ), f"All artifacts must be of S3 type if enable_cache|enable_html=True, artifact [{artifact.name}], type [{artifact.type}], workflow [{workflow.name}]"
# DockerHub credentials: the username/secret settings must be defined,
# and the secret must have a concrete configuration in workflow.secrets.
if workflow.dockers:
    assert (
        Settings.DOCKERHUB_USERNAME
    ), f"Settings.DOCKERHUB_USERNAME must be provided if workflow has dockers, workflow [{workflow.name}]"
    assert (
        Settings.DOCKERHUB_SECRET
    ), f"Settings.DOCKERHUB_SECRET must be provided if workflow has dockers, workflow [{workflow.name}]"
    assert workflow.get_secret(
        Settings.DOCKERHUB_SECRET
    ), f"Secret [{Settings.DOCKERHUB_SECRET}] must have configuration in workflow.secrets, workflow [{workflow.name}]"
# Cache, report and merge-ready status all need S3 access, which
# GitHub-hosted runners ("ubuntu-latest") do not have — forbid them on
# every job.
if (
    workflow.enable_cache
    or workflow.enable_report
    or workflow.enable_merge_ready_status
):
    for job in workflow.jobs:
        # The generator variable used to shadow the loop variable `job`;
        # renamed to `runner` for clarity (behavior unchanged).
        assert not any(
            runner in ("ubuntu-latest",) for runner in job.runs_on
        ), f"GitHub Runners must not be used for workflow with enabled: workflow.enable_cache, workflow.enable_html or workflow.enable_merge_ready_status as s3 access is required, workflow [{workflow.name}], job [{job.name}]"
# CI DB integration requires the connection URL/password secrets plus
# the database and table names.
if workflow.enable_cidb:
    assert (
        Settings.SECRET_CI_DB_URL
    ), f"Settings.CI_DB_URL_SECRET must be provided if workflow.enable_cidb=True, workflow [{workflow.name}]"
    assert (
        Settings.SECRET_CI_DB_PASSWORD
    ), f"Settings.CI_DB_PASSWORD_SECRET must be provided if workflow.enable_cidb=True, workflow [{workflow.name}]"
    assert (
        Settings.CI_DB_DB_NAME
    ), f"Settings.CI_DB_DB_NAME must be provided if workflow.enable_cidb=True, workflow [{workflow.name}]"
    assert (
        Settings.CI_DB_TABLE_NAME
    ), f"Settings.CI_DB_TABLE_NAME must be provided if workflow.enable_cidb=True, workflow [{workflow.name}]"