more info for buddy

Max K 2024-07-19 20:43:14 +02:00
parent 3b84288577
commit 3767f72348
7 changed files with 338 additions and 173 deletions

View File

@ -58,14 +58,11 @@ runs:
shell: bash
run: |
python3 ./tests/ci/create_release.py --create-bump-version-pr ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Checkout master
shell: bash
run: |
git checkout master
- name: Bump Docker versions, Changelog, Security
if: ${{ inputs.type == 'patch' }}
shell: bash
run: |
python3 ./tests/ci/create_release.py --set-progress-started --progress "update ChangeLog"
[ "$(git branch --show-current)" != "master" ] && echo "not on the master" && exit 1
echo "List versions"
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
@ -108,12 +105,13 @@ runs:
shell: bash
run: |
git checkout "$GITHUB_REF_NAME"
# set current progress to OK
python3 ./tests/ci/create_release.py --set-progress-completed
- name: Create GH Release
shell: bash
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/create_release.py --create-gh-release \
${{ inputs.dry-run && '--dry-run' || '' }}
python3 ./tests/ci/create_release.py --create-gh-release ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Export TGZ Packages
if: ${{ inputs.type == 'patch' }}
shell: bash
@ -148,16 +146,26 @@ runs:
if: ${{ inputs.type == 'patch' }}
shell: bash
run: |
python3 ./tests/ci/create_release.py --set-progress-started --progress "docker server release"
cd "./tests/ci"
export CHECK_NAME="Docker server image"
python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
python3 ./tests/ci/create_release.py --set-progress-completed
- name: Docker clickhouse/clickhouse-keeper building
if: ${{ inputs.type == 'patch' }}
shell: bash
run: |
python3 ./tests/ci/create_release.py --set-progress-started --progress "docker keeper release"
cd "./tests/ci"
export CHECK_NAME="Docker keeper image"
python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
python3 ./tests/ci/create_release.py --set-progress-completed
- name: Set Release progress completed
shell: bash
run: |
# If we got here, set the completed status so the next step posts the proper Slack OK or FAIL message
python3 ./tests/ci/create_release.py --set-progress-started --progress "completed"
python3 ./tests/ci/create_release.py --set-progress-completed
- name: Post Slack Message
if: ${{ !cancelled() }}
shell: bash

View File

@ -102,7 +102,8 @@ jobs:
- name: Post Slack Message
if: ${{ !cancelled() }}
run: |
echo Slack Message
cd "$GITHUB_WORKSPACE/tests/ci"
python3 auto_release.py --post-auto-release-complete --wf-status ${{ job.status }}
- name: Clean up
run: |
docker ps --quiet | xargs --no-run-if-empty docker kill ||:

View File

@ -3,7 +3,12 @@ import time
from pathlib import Path
from typing import Optional
from shutil import copy2
from create_release import PackageDownloader, ReleaseInfo, ShellRunner
from create_release import (
PackageDownloader,
ReleaseInfo,
ReleaseContextManager,
ReleaseProgress,
)
from ci_utils import WithIter, Shell
@ -76,19 +81,20 @@ class R2MountPoint:
)
_TEST_MOUNT_CMD = f"mount | grep -q {self.MOUNT_POINT}"
ShellRunner.run(_CLEAN_LOG_FILE_CMD)
ShellRunner.run(_UNMOUNT_CMD)
ShellRunner.run(_MKDIR_CMD)
ShellRunner.run(_MKDIR_FOR_CACHE)
ShellRunner.run(self.mount_cmd, async_=self.async_mount)
Shell.run(_CLEAN_LOG_FILE_CMD)
Shell.run(_UNMOUNT_CMD)
Shell.run(_MKDIR_CMD)
Shell.run(_MKDIR_FOR_CACHE)
# a plain run() here would either block or fail, so start the mount command as a daemon
Shell.run_as_daemon(self.mount_cmd)
if self.async_mount:
time.sleep(3)
ShellRunner.run(_TEST_MOUNT_CMD)
Shell.run(_TEST_MOUNT_CMD, check=True)
@classmethod
def teardown(cls):
print(f"Unmount [{cls.MOUNT_POINT}]")
ShellRunner.run(f"umount {cls.MOUNT_POINT}")
Shell.run(f"umount {cls.MOUNT_POINT}")
class RepoCodenames(metaclass=WithIter):
@ -124,8 +130,8 @@ class DebianArtifactory:
cmd = f"{REPREPRO_CMD_PREFIX} includedeb {self.codename} {' '.join(paths)}"
print("Running export command:")
print(f" {cmd}")
ShellRunner.run(cmd)
ShellRunner.run("sync")
Shell.run(cmd, check=True)
Shell.run("sync")
if self.codename == RepoCodenames.LTS:
packages_with_version = [
@ -137,8 +143,8 @@ class DebianArtifactory:
cmd = f"{REPREPRO_CMD_PREFIX} copy {RepoCodenames.STABLE} {RepoCodenames.LTS} {' '.join(packages_with_version)}"
print("Running copy command:")
print(f" {cmd}")
ShellRunner.run(cmd)
ShellRunner.run("sync")
Shell.run(cmd, check=True)
Shell.run("sync")
def test_packages(self):
Shell.run("docker pull ubuntu:latest")
@ -206,12 +212,12 @@ class RpmArtifactory:
for command in commands:
print("Running command:")
print(f" {command}")
ShellRunner.run(command)
Shell.run(command, check=True)
update_public_key = f"gpg --armor --export {self._SIGN_KEY}"
pub_key_path = dest_dir / "repodata" / "repomd.xml.key"
print("Updating repomd.xml.key")
pub_key_path.write_text(ShellRunner.run(update_public_key)[1])
pub_key_path.write_text(Shell.run(update_public_key, check=True))
if codename == RepoCodenames.LTS:
self.export_packages(RepoCodenames.STABLE)
Shell.run("sync")
@ -264,23 +270,29 @@ class TgzArtifactory:
if codename == RepoCodenames.LTS:
self.export_packages(RepoCodenames.STABLE)
ShellRunner.run("sync")
Shell.run("sync")
def test_packages(self):
tgz_file = "/tmp/tmp.tgz"
tgz_sha_file = "/tmp/tmp.tgz.sha512"
ShellRunner.run(
f"curl -o {tgz_file} -f0 {self.repo_url}/stable/clickhouse-client-{self.version}-arm64.tgz"
cmd = f"curl -o {tgz_file} -f0 {self.repo_url}/stable/clickhouse-client-{self.version}-arm64.tgz"
Shell.run(
cmd,
check=True,
)
ShellRunner.run(
f"curl -o {tgz_sha_file} -f0 {self.repo_url}/stable/clickhouse-client-{self.version}-arm64.tgz.sha512"
Shell.run(
f"curl -o {tgz_sha_file} -f0 {self.repo_url}/stable/clickhouse-client-{self.version}-arm64.tgz.sha512",
check=True,
)
expected_checksum = ShellRunner.run(f"cut -d ' ' -f 1 {tgz_sha_file}")
actual_checksum = ShellRunner.run(f"sha512sum {tgz_file} | cut -d ' ' -f 1")
expected_checksum = Shell.run(f"cut -d ' ' -f 1 {tgz_sha_file}", check=True)
actual_checksum = Shell.run(f"sha512sum {tgz_file} | cut -d ' ' -f 1")
assert (
expected_checksum == actual_checksum
), f"[{actual_checksum} != {expected_checksum}]"
ShellRunner.run("rm /tmp/tmp.tgz*")
Shell.run("rm /tmp/tmp.tgz*")
release_info = ReleaseInfo.from_file()
release_info.tgz_command = cmd
release_info.dump()
def parse_args() -> argparse.Namespace:
@ -338,20 +350,26 @@ if __name__ == "__main__":
"""
mp = R2MountPoint(MountPointApp.S3FS, dry_run=args.dry_run)
if args.export_debian:
mp.init()
DebianArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
with ReleaseContextManager(release_progress=ReleaseProgress.EXPORT_DEB) as _:
mp.init()
DebianArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
if args.export_rpm:
mp.init()
RpmArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
with ReleaseContextManager(release_progress=ReleaseProgress.EXPORT_RPM) as _:
mp.init()
RpmArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
if args.export_tgz:
mp.init()
TgzArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
with ReleaseContextManager(release_progress=ReleaseProgress.EXPORT_TGZ) as _:
mp.init()
TgzArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
if args.test_debian:
DebianArtifactory(release_info, dry_run=args.dry_run).test_packages()
with ReleaseContextManager(release_progress=ReleaseProgress.TEST_DEB) as _:
DebianArtifactory(release_info, dry_run=args.dry_run).test_packages()
if args.test_tgz:
TgzArtifactory(release_info, dry_run=args.dry_run).test_packages()
with ReleaseContextManager(release_progress=ReleaseProgress.TEST_TGZ) as _:
TgzArtifactory(release_info, dry_run=args.dry_run).test_packages()
if args.test_rpm:
RpmArtifactory(release_info, dry_run=args.dry_run).test_packages()
with ReleaseContextManager(release_progress=ReleaseProgress.TEST_RPM) as _:
RpmArtifactory(release_info, dry_run=args.dry_run).test_packages()

View File

@ -25,11 +25,22 @@ def parse_args():
action="store_true",
help="Post release branch statuses",
)
parser.add_argument(
"--post-auto-release-complete",
action="store_true",
help="Post autorelease completion status",
)
parser.add_argument(
"--prepare",
action="store_true",
help="Prepare autorelease info",
)
parser.add_argument(
"--wf-status",
type=str,
default="",
help="overall workflow status [success|failure]",
)
return parser.parse_args(), parser
@ -180,6 +191,22 @@ def main():
title=f"Auto Release Status for {release_info.release_branch}",
body=release_info.to_dict(),
)
if args.post_auto_release_complete:
assert args.wf_status, "--wf-status is required with --post-auto-release-complete"
if args.wf_status != SUCCESS:
CIBuddy(dry_run=False).post_job_error(
error_description="Autorelease workflow failed",
job_name="Autorelease",
with_instance_info=False,
with_wf_link=True,
critical=True,
)
else:
CIBuddy(dry_run=False).post_info(
title=f"Autorelease completed",
body="",
with_wf_link=True,
)
elif args.prepare:
_prepare(token=args.token or get_best_robot_token())
else:

View File

@ -109,6 +109,7 @@ class CIBuddy:
job_name: str = "",
with_instance_info: bool = True,
with_wf_link: bool = True,
critical: bool = False,
) -> None:
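# critical=True swaps the :red_circle: icon for :black_circle: in the Slack error message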
instance_id, instance_type = "unknown", "unknown"
if with_instance_info:
@ -116,7 +117,8 @@ class CIBuddy:
instance_type = Shell.run("ec2metadata --instance-type") or instance_type
if not job_name:
job_name = os.getenv("CHECK_NAME", "unknown")
line_err = f":red_circle: *Error: {error_description}*\n\n"
sign = ":red_circle:" if not critical else ":black_circle:"
line_err = f"{sign} *Error: {error_description}*\n\n"
line_ghr = f" *Runner:* `{instance_type}`, `{instance_id}`\n"
line_job = f" *Job:* `{job_name}`\n"
line_pr_ = f" *PR:* <https://github.com/{self.repo}/pull/{self.pr_number}|#{self.pr_number}>, <{self.commit_url}|{self.sha}>\n"

View File

@ -4,7 +4,7 @@ import subprocess
import time
from contextlib import contextmanager
from pathlib import Path
from typing import Any, Iterator, List, Union, Optional, Sequence
from typing import Any, Iterator, List, Union, Optional, Sequence, Tuple
import requests
@ -142,6 +142,16 @@ class GHActions:
return False
@staticmethod
def get_pr_url_by_branch(repo, branch):
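# Look up an open PR for `branch` via the gh CLI; returns its URL, or "" (with an error message) if none is found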
get_url_cmd = (
f"gh pr list --repo {repo} --head {branch} --json url --jq '.[0].url'"
)
url = Shell.run(get_url_cmd)
if not url:
print(f"ERROR: PR nor found, branch [{branch}]")
return url
class Shell:
@classmethod
@ -157,7 +167,10 @@ class Shell:
return res.stdout.strip()
@classmethod
def run(cls, command, check=False):
def run(cls, command, check=False, dry_run=False):
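# check=True turns a non-zero exit code into an assertion failure; dry_run only prints the command and returns ""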
if dry_run:
print(f"Dry-ryn. Would run command [{command}]")
return ""
print(f"Run command [{command}]")
res = ""
result = subprocess.run(
@ -178,6 +191,12 @@ class Shell:
assert result.returncode == 0
return res.strip()
@classmethod
def run_as_daemon(cls, command):
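# Start the command in the background and return immediately (used for the R2 mount, which would otherwise block)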
print(f"Run daemon command [{command}]")
subprocess.Popen(command.split(" ")) # pylint:disable=consider-using-with
return 0, ""
@classmethod
def check(cls, command):
result = subprocess.run(

View File

@ -2,7 +2,6 @@ import argparse
import dataclasses
import json
import os
import subprocess
from contextlib import contextmanager
from copy import copy
@ -13,7 +12,7 @@ from git_helper import Git, GIT_PREFIX
from ssh import SSHAgent
from env_helper import GITHUB_REPOSITORY, S3_BUILDS_BUCKET
from s3_helper import S3Helper
from ci_utils import Shell
from ci_utils import Shell, GHActions
from ci_buddy import CIBuddy
from version_helper import (
FILE_WITH_VERSION_PATH,
@ -31,32 +30,62 @@ CONTRIBUTORS_PATH = get_abs_path(GENERATED_CONTRIBUTORS)
RELEASE_INFO_FILE = "/tmp/release_info.json"
class ShellRunner:
class ReleaseProgress:
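# Named steps of the release pipeline; the current step is stored in ReleaseInfo.release_progress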
STARTED = "started"
DOWNLOAD_PACKAGES = "download packages"
PUSH_RELEASE_TAG = "push release tag"
PUSH_NEW_RELEASE_BRANCH = "push new release branch"
BUMP_VERSION = "bump version"
CREATE_GH_RELEASE = "create GH release"
EXPORT_TGZ = "export TGZ packages"
EXPORT_RPM = "export RPM packages"
EXPORT_DEB = "export DEB packages"
TEST_TGZ = "test TGZ packages"
TEST_RPM = "test RPM packages"
TEST_DEB = "test DEB packages"
@classmethod
def run(
cls, command, check_retcode=True, print_output=True, async_=False, dry_run=False
):
if dry_run:
print(f"Dry-run: Would run shell command: [{command}]")
return 0, ""
print(f"Running shell command: [{command}]")
if async_:
subprocess.Popen(command.split(" ")) # pylint:disable=consider-using-with
return 0, ""
result = subprocess.run(
command + " 2>&1",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
check=True,
)
if print_output:
print(result.stdout)
if check_retcode:
assert result.returncode == 0, f"Return code [{result.returncode}]"
return result.returncode, result.stdout
class ReleaseProgressDescription:
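# Outcome of the current step: FAILED while it is running or after an error, OK once it has completed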
OK = "OK"
FAILED = "FAILED"
class ReleaseContextManager:
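# Context manager for one release step: on enter it records the step in the release info
# (creating a fresh ReleaseInfo for the STARTED step), on exit it marks the step OK or FAILED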
def __init__(self, release_progress):
self.release_progress = release_progress
self.release_info = None
def __enter__(self):
if self.release_progress == ReleaseProgress.STARTED:
# create initial release info
self.release_info = ReleaseInfo(
release_branch="NA",
commit_sha=args.ref,
release_tag="NA",
version="NA",
codename="NA",
previous_release_tag="NA",
previous_release_sha="NA",
release_progress=ReleaseProgress.STARTED,
).dump()
else:
# fetch release info from fs and update
self.release_info = ReleaseInfo.from_file()
assert self.release_info
assert (
self.release_info.progress_description == ReleaseProgressDescription.OK
), "Must be OK on the start of new context"
self.release_info.release_progress = self.release_progress
self.release_info.dump()
return self.release_info
def __exit__(self, exc_type, exc_value, traceback):
assert self.release_info
if exc_type is not None:
self.release_info.progress_description = ReleaseProgressDescription.FAILED
else:
self.release_info.progress_description = ReleaseProgressDescription.OK
self.release_info.dump()
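# Usage sketch, mirroring the calls in __main__ below:
#   with ReleaseContextManager(release_progress=ReleaseProgress.PUSH_RELEASE_TAG) as release_info:
#       release_info.push_release_tag(dry_run=args.dry_run)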
@dataclasses.dataclass
@ -74,6 +103,10 @@ class ReleaseInfo:
release_url: str = ""
debian_command: str = ""
rpm_command: str = ""
tgz_command: str = ""
docker_command: str = ""
release_progress: str = ""
progress_description: str = ""
@staticmethod
def from_file() -> "ReleaseInfo":
@ -85,9 +118,9 @@ class ReleaseInfo:
print(f"Dump release info into [{RELEASE_INFO_FILE}]")
with open(RELEASE_INFO_FILE, "w", encoding="utf-8") as f:
print(json.dumps(dataclasses.asdict(self), indent=2), file=f)
return self
@staticmethod
def prepare(commit_ref: str, release_type: str) -> None:
def prepare(self, commit_ref: str, release_type: str) -> "ReleaseInfo":
version = None
release_branch = None
release_tag = None
@ -97,8 +130,9 @@ class ReleaseInfo:
assert release_type in ("patch", "new")
if release_type == "new":
# check commit_ref is right and on a right branch
ShellRunner.run(
f"git merge-base --is-ancestor origin/{commit_ref} origin/master"
Shell.run(
f"git merge-base --is-ancestor origin/{commit_ref} origin/master",
check=True,
)
with checkout(commit_ref):
commit_sha = Shell.run(f"git rev-parse {commit_ref}", check=True)
@ -130,10 +164,11 @@ class ReleaseInfo:
version.with_description(codename)
release_branch = f"{version.major}.{version.minor}"
release_tag = version.describe
ShellRunner.run(f"{GIT_PREFIX} fetch origin {release_branch} --tags")
Shell.run(f"{GIT_PREFIX} fetch origin {release_branch} --tags", check=True)
# check commit is right and on a right branch
ShellRunner.run(
f"git merge-base --is-ancestor {commit_ref} origin/{release_branch}"
Shell.run(
f"git merge-base --is-ancestor {commit_ref} origin/{release_branch}",
check=True,
)
if version.patch == 1:
expected_version = copy(version)
@ -172,21 +207,22 @@ class ReleaseInfo:
and version
and codename in ("lts", "stable")
)
res = ReleaseInfo(
release_branch=release_branch,
commit_sha=commit_sha,
release_tag=release_tag,
version=version.string,
codename=codename,
previous_release_tag=previous_release_tag,
previous_release_sha=previous_release_sha,
)
res.dump()
self.release_branch = release_branch
self.commit_sha = commit_sha
self.release_tag = release_tag
self.version = version.string
self.codename = codename
self.previous_release_tag = previous_release_tag
self.previous_release_sha = previous_release_sha
self.release_progress = ReleaseProgress.STARTED
self.progress_description = ReleaseProgressDescription.OK
return self
def push_release_tag(self, dry_run: bool) -> None:
if dry_run:
# remove locally created tag from prev run
ShellRunner.run(
Shell.run(
f"{GIT_PREFIX} tag -l | grep -q {self.release_tag} && git tag -d {self.release_tag} ||:"
)
# Create release tag
@ -194,16 +230,17 @@ class ReleaseInfo:
f"Create and push release tag [{self.release_tag}], commit [{self.commit_sha}]"
)
tag_message = f"Release {self.release_tag}"
ShellRunner.run(
f"{GIT_PREFIX} tag -a -m '{tag_message}' {self.release_tag} {self.commit_sha}"
Shell.run(
f"{GIT_PREFIX} tag -a -m '{tag_message}' {self.release_tag} {self.commit_sha}",
check=True,
)
cmd_push_tag = f"{GIT_PREFIX} push origin {self.release_tag}:{self.release_tag}"
ShellRunner.run(cmd_push_tag, dry_run=dry_run)
Shell.run(cmd_push_tag, dry_run=dry_run, check=True)
@staticmethod
def _create_gh_label(label: str, color_hex: str, dry_run: bool) -> None:
cmd = f"gh api repos/{GITHUB_REPOSITORY}/labels -f name={label} -f color={color_hex}"
ShellRunner.run(cmd, dry_run=dry_run)
Shell.run(cmd, dry_run=dry_run, check=True)
def push_new_release_branch(self, dry_run: bool) -> None:
assert (
@ -220,8 +257,8 @@ class ReleaseInfo:
), f"Unexpected current version in git, must precede [{self.version}] by one step, actual [{version.string}]"
if dry_run:
# remove locally created branch from prev run
ShellRunner.run(
f"{GIT_PREFIX} branch -l | grep -q {new_release_branch} && git branch -d {new_release_branch} ||:"
Shell.run(
f"{GIT_PREFIX} branch -l | grep -q {new_release_branch} && git branch -d {new_release_branch}"
)
print(
f"Create and push new release branch [{new_release_branch}], commit [{self.commit_sha}]"
@ -234,7 +271,7 @@ class ReleaseInfo:
cmd_push_branch = (
f"{GIT_PREFIX} push --set-upstream origin {new_release_branch}"
)
ShellRunner.run(cmd_push_branch, dry_run=dry_run)
Shell.run(cmd_push_branch, dry_run=dry_run, check=True)
print("Create and push backport tags for new release branch")
ReleaseInfo._create_gh_label(
@ -243,12 +280,13 @@ class ReleaseInfo:
ReleaseInfo._create_gh_label(
f"v{new_release_branch}-affected", "c2bfff", dry_run=dry_run
)
ShellRunner.run(
Shell.run(
f"""gh pr create --repo {GITHUB_REPOSITORY} --title 'Release pull request for branch {new_release_branch}'
--head {new_release_branch} {pr_labels}
--body 'This PullRequest is a part of ClickHouse release cycle. It is used by CI system only. Do not perform any changes with it.'
""",
dry_run=dry_run,
check=True,
)
def update_version_and_contributors_list(self, dry_run: bool) -> None:
@ -274,31 +312,34 @@ class ReleaseInfo:
body_file = get_abs_path(".github/PULL_REQUEST_TEMPLATE.md")
actor = os.getenv("GITHUB_ACTOR", "") or "me"
cmd_create_pr = f"gh pr create --repo {GITHUB_REPOSITORY} --title 'Update version after release' --head {branch_upd_version_contributors} --base {self.release_branch} --body-file '{body_file} --label 'do not test' --assignee @{actor}"
ShellRunner.run(cmd_commit_version_upd, dry_run=dry_run)
ShellRunner.run(cmd_push_branch, dry_run=dry_run)
ShellRunner.run(cmd_create_pr, dry_run=dry_run)
Shell.run(cmd_commit_version_upd, check=True, dry_run=dry_run)
Shell.run(cmd_push_branch, check=True, dry_run=dry_run)
Shell.run(cmd_create_pr, check=True, dry_run=dry_run)
if dry_run:
ShellRunner.run(
f"{GIT_PREFIX} diff '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}'"
)
ShellRunner.run(
Shell.run(f"{GIT_PREFIX} diff '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}'")
Shell.run(
f"{GIT_PREFIX} checkout '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}'"
)
self.version_bump_pr = GHActions.get_pr_url_by_branch(
repo=GITHUB_REPOSITORY, branch=branch_upd_version_contributors
)
def update_release_info(self, dry_run: bool) -> None:
def update_release_info(self, dry_run: bool) -> "ReleaseInfo":
branch = f"auto/{release_info.release_tag}"
if not dry_run:
get_url_cmd = f"gh pr list --repo {GITHUB_REPOSITORY} --head {branch} --json url --jq '.[0].url'"
url = Shell.run(get_url_cmd)
if url:
print(f"Update release info with Changelog PR link [{url}]")
else:
print(f"WARNING: Changelog PR not found, branch [{branch}]")
url = GHActions.get_pr_url_by_branch(repo=GITHUB_REPOSITORY, branch=branch)
else:
url = "dry-run"
print(f"ChangeLog PR url [{url}]")
self.changelog_pr = url
print(f"Release url [{url}]")
self.release_url = (
f"https://github.com/{GITHUB_REPOSITORY}/releases/tag/{self.release_tag}"
)
self.docker_command = f"docker run --rm clickhouse/clickhouse:{self.release_branch} clickhouse --version"
self.dump()
return self
def create_gh_release(self, packages_files: List[str], dry_run: bool) -> None:
repo = os.getenv("GITHUB_REPOSITORY")
@ -376,7 +417,7 @@ class PackageDownloader:
self.macos_package_files = ["clickhouse-macos", "clickhouse-macos-aarch64"]
self.file_to_type = {}
ShellRunner.run(f"mkdir -p {self.LOCAL_DIR}")
Shell.run(f"mkdir -p {self.LOCAL_DIR}")
for package_type in self.PACKAGE_TYPES:
for package in self.package_names:
@ -426,7 +467,7 @@ class PackageDownloader:
return res
def run(self):
ShellRunner.run(f"rm -rf {self.LOCAL_DIR}/*")
Shell.run(f"rm -rf {self.LOCAL_DIR}/*")
for package_file in (
self.deb_package_files + self.rpm_package_files + self.tgz_package_files
):
@ -499,6 +540,37 @@ class PackageDownloader:
return True
@contextmanager
def checkout(ref: str) -> Iterator[None]:
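# Temporarily check out `ref` and always switch back to the original branch, even if the body raises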
orig_ref = Shell.run(f"{GIT_PREFIX} symbolic-ref --short HEAD", check=True)
rollback_cmd = f"{GIT_PREFIX} checkout {orig_ref}"
assert orig_ref
if ref not in (orig_ref,):
Shell.run(f"{GIT_PREFIX} checkout {ref}")
try:
yield
except (Exception, KeyboardInterrupt) as e:
print(f"ERROR: Exception [{e}]")
Shell.run(rollback_cmd)
raise
Shell.run(rollback_cmd)
@contextmanager
def checkout_new(ref: str) -> Iterator[None]:
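# Create and check out a new branch `ref`; switch back to the original branch when the block exits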
orig_ref = Shell.run(f"{GIT_PREFIX} symbolic-ref --short HEAD", check=True)
rollback_cmd = f"{GIT_PREFIX} checkout {orig_ref}"
assert orig_ref
Shell.run(f"{GIT_PREFIX} checkout -b {ref}", check=True)
try:
yield
except (Exception, KeyboardInterrupt) as e:
print(f"ERROR: Exception [{e}]")
Shell.run(rollback_cmd)
raise
Shell.run(rollback_cmd)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
@ -556,44 +628,26 @@ def parse_args() -> argparse.Namespace:
action="store_true",
help="do not make any actual changes in the repo, just show what will be done",
)
parser.add_argument(
"--set-progress-started",
action="store_true",
help="Set new progress step, --progress <PROGRESS STEP> must be set",
)
parser.add_argument(
"--progress",
type=str,
help="Progress step name, see @ReleaseProgress",
)
parser.add_argument(
"--set-progress-completed",
action="store_true",
help="Set current progress step to OK (completed)",
)
return parser.parse_args()
@contextmanager
def checkout(ref: str) -> Iterator[None]:
_, orig_ref = ShellRunner.run(f"{GIT_PREFIX} symbolic-ref --short HEAD")
rollback_cmd = f"{GIT_PREFIX} checkout {orig_ref}"
assert orig_ref
if ref not in (orig_ref,):
Shell.run(f"{GIT_PREFIX} checkout {ref}")
try:
yield
except (Exception, KeyboardInterrupt) as e:
print(f"ERROR: Exception [{e}]")
ShellRunner.run(rollback_cmd)
raise
ShellRunner.run(rollback_cmd)
@contextmanager
def checkout_new(ref: str) -> Iterator[None]:
_, orig_ref = ShellRunner.run(f"{GIT_PREFIX} symbolic-ref --short HEAD")
rollback_cmd = f"{GIT_PREFIX} checkout {orig_ref}"
assert orig_ref
ShellRunner.run(f"{GIT_PREFIX} checkout -b {ref}")
try:
yield
except (Exception, KeyboardInterrupt) as e:
print(f"ERROR: Exception [{e}]")
ShellRunner.run(rollback_cmd)
raise
ShellRunner.run(rollback_cmd)
if __name__ == "__main__":
args = parse_args()
assert args.dry_run
# prepare ssh for git if needed
_ssh_agent = None
@ -605,35 +659,56 @@ if __name__ == "__main__":
_ssh_agent.print_keys()
if args.prepare_release_info:
assert (
args.ref and args.release_type
), "--ref and --release-type must be provided with --prepare-release-info"
ReleaseInfo.prepare(commit_ref=args.ref, release_type=args.release_type)
if args.push_release_tag:
release_info = ReleaseInfo.from_file()
release_info.push_release_tag(dry_run=args.dry_run)
if args.push_new_release_branch:
release_info = ReleaseInfo.from_file()
release_info.push_new_release_branch(dry_run=args.dry_run)
if args.create_bump_version_pr:
# TODO: store link to PR in release info
release_info = ReleaseInfo.from_file()
release_info.update_version_and_contributors_list(dry_run=args.dry_run)
with ReleaseContextManager(
release_progress=ReleaseProgress.STARTED
) as release_info:
assert (
args.ref and args.release_type
), "--ref and --release-type must be provided with --prepare-release-info"
release_info.prepare(commit_ref=args.ref, release_type=args.release_type)
if args.download_packages:
release_info = ReleaseInfo.from_file()
p = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
p.run()
with ReleaseContextManager(
release_progress=ReleaseProgress.DOWNLOAD_PACKAGES
) as release_info:
p = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
p.run()
if args.push_release_tag:
with ReleaseContextManager(
release_progress=ReleaseProgress.PUSH_RELEASE_TAG
) as release_info:
release_info.push_release_tag(dry_run=args.dry_run)
if args.push_new_release_branch:
with ReleaseContextManager(
release_progress=ReleaseProgress.PUSH_NEW_RELEASE_BRANCH
) as release_info:
release_info.push_new_release_branch(dry_run=args.dry_run)
if args.create_bump_version_pr:
with ReleaseContextManager(
release_progress=ReleaseProgress.BUMP_VERSION
) as release_info:
release_info.update_version_and_contributors_list(dry_run=args.dry_run)
if args.create_gh_release:
release_info = ReleaseInfo.from_file()
p = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
with ReleaseContextManager(
release_progress=ReleaseProgress.CREATE_GH_RELEASE
) as release_info:
p = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
release_info.create_gh_release(
packages_files=p.get_all_packages_files(), dry_run=args.dry_run
)
if args.post_status:
release_info = ReleaseInfo.from_file()
release_info.update_release_info(dry_run=args.dry_run)
@ -646,6 +721,21 @@ if __name__ == "__main__":
f"Failed to issue new release", dataclasses.asdict(release_info)
)
if args.set_progress_started:
assert args.progress, "Progress step name must be provided"
ri = ReleaseInfo.from_file()
ri.release_progress = args.progress
ri.progress_description = ReleaseProgressDescription.FAILED
ri.dump()
if args.set_progress_completed:
ri = ReleaseInfo.from_file()
assert (
ri.progress_description == ReleaseProgressDescription.FAILED
), "Must be FAILED before set to OK"
ri.progress_description = ReleaseProgressDescription.OK
ri.dump()
# tear down ssh
if _ssh_agent and _key_pub:
_ssh_agent.remove(_key_pub)