Fix docker jobs dependencies

Max K 2024-07-29 14:16:20 +02:00
parent 56c15b37eb
commit 9f8c90065e
3 changed files with 31 additions and 36 deletions

View File

@@ -145,7 +145,9 @@ runs:
         cd "./tests/ci"
         python3 ./create_release.py --set-progress-started --progress "docker server release"
         export CHECK_NAME="Docker server image"
-        python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
+        git checkout ${{ env.RELEASE_TAG }}
+        python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" ${{ ! inputs.dry-run && '--push' || '' }}
+        git checkout -
         python3 ./create_release.py --set-progress-completed
     - name: Docker clickhouse/clickhouse-keeper building
       if: ${{ inputs.type == 'patch' }}
@@ -154,15 +156,19 @@ runs:
         cd "./tests/ci"
         python3 ./create_release.py --set-progress-started --progress "docker keeper release"
         export CHECK_NAME="Docker keeper image"
-        python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
+        git checkout ${{ env.RELEASE_TAG }}
+        python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" ${{ ! inputs.dry-run && '--push' || '' }}
+        git checkout -
         python3 ./create_release.py --set-progress-completed
     - name: Set current Release progress to Completed with OK
       shell: bash
       run: |
+        git checkout "$GITHUB_REF_NAME"
         python3 ./tests/ci/create_release.py --set-progress-started --progress "completed"
         python3 ./tests/ci/create_release.py --set-progress-completed
     - name: Post Slack Message
       if: ${{ !cancelled() }}
       shell: bash
       run: |
+        git checkout "$GITHUB_REF_NAME"
         python3 ./tests/ci/create_release.py --post-status ${{ inputs.dry-run && '--dry-run' || '' }}
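
Note: both docker steps above now check out the release tag, build from that tree, and switch back, instead of passing --sha ${{ env.COMMIT_SHA }} to docker_server.py; the later steps return to $GITHUB_REF_NAME before calling create_release.py again. A minimal Python sketch of that checkout/build/restore pattern (the run() helper and the try/finally are illustrative, the action itself does this in plain bash):

# Minimal sketch of the checkout/build/restore pattern the updated steps rely on.
import subprocess
from typing import List


def run(cmd: List[str]) -> None:
    # Fail fast, like `set -e` semantics in the action's bash steps.
    subprocess.run(cmd, check=True)


def build_docker_from_tag(release_tag: str, check_name: str, push: bool) -> None:
    run(["git", "checkout", release_tag])  # build from the tagged source tree
    try:
        cmd = [
            "python3",
            "docker_server.py",
            "--release-type",
            "auto",
            "--version",
            release_tag,
            "--check-name",
            check_name,
        ]
        if push:
            cmd.append("--push")
        run(cmd)
    finally:
        run(["git", "checkout", "-"])  # return to the previously checked-out ref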

View File

@@ -508,10 +508,10 @@ class CI:
             runner_type=Runners.STYLE_CHECKER,
         ),
         JobNames.DOCKER_SERVER: CommonJobConfigs.DOCKER_SERVER.with_properties(
-            required_builds=[BuildNames.PACKAGE_RELEASE]
+            required_builds=[BuildNames.PACKAGE_RELEASE, BuildNames.PACKAGE_AARCH64]
         ),
         JobNames.DOCKER_KEEPER: CommonJobConfigs.DOCKER_SERVER.with_properties(
-            required_builds=[BuildNames.PACKAGE_RELEASE]
+            required_builds=[BuildNames.PACKAGE_RELEASE, BuildNames.PACKAGE_AARCH64]
         ),
         JobNames.DOCS_CHECK: JobConfig(
             digest=DigestConfig(
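
Note: the server and keeper images are multi-arch, so their jobs must now wait for both the amd64 (PACKAGE_RELEASE) and aarch64 (PACKAGE_AARCH64) package builds. A toy model of how a required_builds list gates a job, purely illustrative and not the ClickHouse CI scheduler:

# Illustrative only: DockerJob is a stand-in, not the JobConfig class from the CI code.
from dataclasses import dataclass, field
from typing import List, Set


@dataclass
class DockerJob:
    name: str
    required_builds: List[str] = field(default_factory=list)

    def is_ready(self, finished_builds: Set[str]) -> bool:
        # The job may start only once every required build has produced its packages.
        return all(build in finished_builds for build in self.required_builds)


docker_server = DockerJob(
    name="Docker server image",
    required_builds=["package_release", "package_aarch64"],  # amd64 + aarch64 packages
)

assert not docker_server.is_ready({"package_release"})  # aarch64 package still missing
assert docker_server.is_ready({"package_release", "package_aarch64"})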

View File

@@ -62,12 +62,6 @@ def parse_args() -> argparse.Namespace:
         help="a version to build, automaticaly got from version_helper, accepts either "
         "tag ('refs/tags/' is removed automatically) or a normal 22.2.2.2 format",
     )
-    parser.add_argument(
-        "--sha",
-        type=str,
-        default="",
-        help="sha of the commit to use packages from",
-    )
     parser.add_argument(
         "--release-type",
         type=str,
@@ -128,17 +122,9 @@ def parse_args() -> argparse.Namespace:
 
 def retry_popen(cmd: str, log_file: Path) -> int:
     max_retries = 2
+    sleep_seconds = 10
+    retcode = -1
     for retry in range(max_retries):
-        # From time to time docker build may failed. Curl issues, or even push
-        # It will sleep progressively 5, 15, 30 and 50 seconds between retries
-        progressive_sleep = 5 * sum(i + 1 for i in range(retry))
-        if progressive_sleep:
-            logging.warning(
-                "The following command failed, sleep %s before retry: %s",
-                progressive_sleep,
-                cmd,
-            )
-            time.sleep(progressive_sleep)
         with TeePopen(
             cmd,
             log_file=log_file,
@@ -146,7 +132,14 @@ def retry_popen(cmd: str, log_file: Path) -> int:
             retcode = process.wait()
             if retcode == 0:
                 return 0
-
+            else:
+                # From time to time docker build may failed. Curl issues, or even push
+                logging.error(
+                    "The following command failed, sleep %s before retry: %s",
+                    sleep_seconds,
+                    cmd,
+                )
+                time.sleep(sleep_seconds)
     return retcode
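
Note: retry_popen now retries with a fixed 10-second pause and logs the failure after each attempt, replacing the old progressive back-off that slept before the attempt. A self-contained sketch of the new behaviour, using subprocess directly instead of the TeePopen log-teeing wrapper:

# Sketch only: fixed sleep between attempts; the real function runs the command
# through TeePopen so that output is also teed into a log file.
import logging
import subprocess
import time


def retry_cmd(cmd: str, max_retries: int = 2, sleep_seconds: int = 10) -> int:
    retcode = -1
    for _ in range(max_retries):
        retcode = subprocess.run(cmd, shell=True, check=False).returncode
        if retcode == 0:
            return 0
        # Docker builds and pushes fail transiently (network, registry), so wait and retry.
        logging.error(
            "The following command failed, sleep %s before retry: %s",
            sleep_seconds,
            cmd,
        )
        time.sleep(sleep_seconds)
    return retcode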
@@ -377,21 +370,6 @@ def main():
     direct_urls: Dict[str, List[str]] = {}
     for arch, build_name in zip(ARCH, ("package_release", "package_aarch64")):
-        if args.bucket_prefix:
-            assert not args.allow_build_reuse
-            repo_urls[arch] = f"{args.bucket_prefix}/{build_name}"
-        elif args.sha:
-            # CreateRelease workflow only. TODO
-            version = args.version
-            repo_urls[arch] = (
-                f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/"
-                f"{version.major}.{version.minor}/{args.sha}/{build_name}"
-            )
-        else:
-            # In all other cases urls must be fetched from build reports. TODO: script needs refactoring
-            repo_urls[arch] = ""
-            assert args.allow_build_reuse
-
         if args.allow_build_reuse:
             # read s3 urls from pre-downloaded build reports
             if "clickhouse-server" in image_repo:
@@ -413,6 +391,17 @@
                     for url in urls
                     if any(package in url for package in PACKAGES) and "-dbg" not in url
                 ]
+        elif args.bucket_prefix:
+            assert not args.allow_build_reuse
+            repo_urls[arch] = f"{args.bucket_prefix}/{build_name}"
+            print(f"Bucket prefix is set: Fetching packages from [{repo_urls}]")
+        else:
+            version = args.version
+            repo_urls[arch] = (
+                f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/"
+                f"{version.major}.{version.minor}/{git.sha}/{build_name}"
+            )
+            print(f"Fetching packages from [{repo_urls}]")
 
     if push:
         docker_login()
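
Note: with the --sha flag gone, URL selection in main() now prefers build-report reuse, then an explicit bucket prefix, and finally falls back to the SHA of the checked-out commit (git.sha), which is why the release action checks out the tag before invoking this script. A sketch of that selection order; the constants are placeholders, not the script's real values:

# Sketch of the package-URL fallback chain after this change.
S3_DOWNLOAD = "https://s3.example.com"  # placeholder
S3_BUILDS_BUCKET = "builds"             # placeholder


def repo_url(
    build_name: str,
    version_major: int,
    version_minor: int,
    git_sha: str,
    bucket_prefix: str = "",
    allow_build_reuse: bool = False,
) -> str:
    if allow_build_reuse:
        # Package URLs are taken from pre-downloaded build reports instead.
        return ""
    if bucket_prefix:
        # Builds uploaded under an explicit bucket prefix.
        return f"{bucket_prefix}/{build_name}"
    # Fallback: packages stored under <major>.<minor>/<commit sha>/<build name>.
    return (
        f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/"
        f"{version_major}.{version_minor}/{git_sha}/{build_name}"
    )


# e.g. repo_url("package_release", 24, 7, "9f8c90065e")
#   -> "https://s3.example.com/builds/24.7/9f8c90065e/package_release"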