more fixes

Max K 2024-08-02 09:23:40 +02:00
parent e034558f74
commit cebb366838
15 changed files with 213 additions and 194 deletions

View File

@@ -0,0 +1,21 @@
+name: CheckWorkflowResults
+description: Check overall workflow status and post error to slack if any
+inputs:
+  needs:
+    description: github needs context as a json string
+    required: true
+    type: string
+runs:
+  using: "composite"
+  steps:
+    - name: Check Workflow
+      shell: bash
+      run: |
+        export WORKFLOW_RESULT_FILE="/tmp/workflow_results.json"
+        cat > "$WORKFLOW_RESULT_FILE" << 'EOF'
+        ${{ inputs.needs }}
+        EOF
+        python3 ./tests/ci/ci_buddy.py --check-wf-status
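
For reference, the `needs` context that this action serializes into WORKFLOW_RESULT_FILE is a JSON object keyed by job id, each entry carrying at least a "result" field. A minimal sketch of the expected file contents; "RunConfig" is a real job name used elsewhere in this commit, "Builds_1" is a made-up placeholder:

import json

# Sketch only: approximate contents of /tmp/workflow_results.json after the
# heredoc above writes ${{ inputs.needs }} to disk.
example_needs = {
    "RunConfig": {"result": "success", "outputs": {"data": "..."}},
    "Builds_1": {"result": "failure", "outputs": {}},
}
print(json.dumps(example_needs, indent=2))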

View File

@@ -24,7 +24,7 @@ concurrency:
       dry-run:
         description: 'Dry run'
         required: false
-        default: true
+        default: false
         type: boolean
 jobs:
@@ -43,16 +43,27 @@ jobs:
       - name: Prepare Release Info
         shell: bash
         run: |
+          if [ ${{ inputs.only-repo }} == "true" ]; then
+            git tag -l ${{ inputs.ref }} || { echo "With only-repo option ref must be a valid release tag"; exit 1; }
+          fi
           python3 ./tests/ci/create_release.py --prepare-release-info \
-            --ref ${{ inputs.ref }} --release-type ${{ inputs.type }} ${{ inputs.dry-run == true && '--dry-run' || '' }}
+            --ref ${{ inputs.ref }} --release-type ${{ inputs.type }} \
+            ${{ inputs.dry-run == true && '--dry-run' || '' }} \
+            ${{ inputs.only-repo == true && '--skip-tag-check' || '' }}
           echo "::group::Release Info"
           python3 -m json.tool /tmp/release_info.json
           echo "::endgroup::"
           release_tag=$(jq -r '.release_tag' /tmp/release_info.json)
           commit_sha=$(jq -r '.commit_sha' /tmp/release_info.json)
+          is_latest=$(jq -r '.latest' /tmp/release_info.json)
           echo "Release Tag: $release_tag"
           echo "RELEASE_TAG=$release_tag" >> "$GITHUB_ENV"
           echo "COMMIT_SHA=$commit_sha" >> "$GITHUB_ENV"
+          if [ "$is_latest" == "true" ]; then
+            echo "DOCKER_TAG_TYPE=release-latest" >> "$GITHUB_ENV"
+          else
+            echo "DOCKER_TAG_TYPE=release" >> "$GITHUB_ENV"
+          fi
       - name: Download All Release Artifacts
         if: ${{ inputs.type == 'patch' }}
         shell: bash
@@ -85,10 +96,11 @@ jobs:
           echo "Generate ChangeLog"
           export CI=1
           docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 -e CI=1 --network=host \
-            --volume=".:/ClickHouse" clickhouse/style-test \
-            /ClickHouse/tests/ci/changelog.py -v --debug-helpers \
+            --volume=".:/wd" --workdir="/wd" \
+            clickhouse/style-test \
+            ./tests/ci/changelog.py -v --debug-helpers \
             --jobs=5 \
-            --output="/ClickHouse/docs/changelogs/${{ env.RELEASE_TAG }}.md" ${{ env.RELEASE_TAG }}
+            --output="./docs/changelogs/${{ env.RELEASE_TAG }}.md" ${{ env.RELEASE_TAG }}
           git add ./docs/changelogs/${{ env.RELEASE_TAG }}.md
           echo "Generate Security"
           python3 ./utils/security-generator/generate_security.py > SECURITY.md
@@ -160,7 +172,7 @@ jobs:
           cd "./tests/ci"
           python3 ./create_release.py --set-progress-started --progress "docker server release"
           export CHECK_NAME="Docker server image"
-          python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
+          python3 docker_server.py --tag-type ${{ env.DOCKER_TAG_TYPE }} --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
           python3 ./create_release.py --set-progress-completed
       - name: Docker clickhouse/clickhouse-keeper building
         if: ${{ inputs.type == 'patch' }}
@@ -169,7 +181,7 @@ jobs:
           cd "./tests/ci"
           python3 ./create_release.py --set-progress-started --progress "docker keeper release"
           export CHECK_NAME="Docker keeper image"
-          python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
+          python3 docker_server.py --tag-type ${{ env.DOCKER_TAG_TYPE }} --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
           python3 ./create_release.py --set-progress-completed
       - name: Update release info. Merge created PRs
         shell: bash
@@ -178,6 +190,7 @@ jobs:
       - name: Set current Release progress to Completed with OK
         shell: bash
         run: |
+          # dummy stage to finalize release info with "progress: completed; status: OK"
           python3 ./tests/ci/create_release.py --set-progress-started --progress "completed"
           python3 ./tests/ci/create_release.py --set-progress-completed
       - name: Post Slack Message
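
The DOCKER_TAG_TYPE chosen above feeds docker_server.py --tag-type (changed later in this commit). A minimal standalone sketch of the resulting tag expansion, with version handling simplified to a plain string (the real gen_tags operates on ClickHouseVersion):

from typing import List


def expand_tags(version: str, tag_type: str) -> List[str]:
    # Simplified stand-in for docker_server.gen_tags(): "release-latest" adds
    # the moving "latest" tag on top of the per-version tags, "release" emits
    # only the per-version tags, "head" is the master image.
    if tag_type == "head":
        return ["head"]
    parts = version.split(".")
    tags = ["latest"] if tag_type == "release-latest" else []
    tags += [".".join(parts[: i + 1]) for i in range(len(parts))]
    return tags


assert expand_tags("22.2.2.2", "release-latest") == ["latest", "22", "22.2", "22.2.2", "22.2.2.2"]
assert expand_tags("22.2.2.2", "release") == ["22", "22.2", "22.2.2", "22.2.2.2"]
assert expand_tags("22.2.2.2", "head") == ["head"]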

View File

@@ -172,12 +172,9 @@ jobs:
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 merge_pr.py --set-ci-status --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
       - name: Check Workflow results
-        run: |
-          export WORKFLOW_RESULT_FILE="/tmp/workflow_results.json"
-          cat > "$WORKFLOW_RESULT_FILE" << 'EOF'
-          ${{ toJson(needs) }}
-          EOF
-          python3 ./tests/ci/ci_buddy.py --check-wf-status
+        uses: ./.github/actions/check_workflow
+        with:
+          needs: ${{ toJson(needs) }}
 ################################# Stage Final #################################
 #                                                                             #

View File

@@ -158,7 +158,7 @@ class DebianArtifactory:
         print("Running test command:")
         print(f"  {cmd}")
         assert Shell.check(cmd)
-        self.release_info.debian_command = debian_command
+        self.release_info.debian = debian_command
         self.release_info.dump()
@@ -240,7 +240,7 @@ class RpmArtifactory:
         print("Running test command:")
         print(f"  {cmd}")
         assert Shell.check(cmd)
-        self.release_info.rpm_command = rpm_command
+        self.release_info.rpm = rpm_command
         self.release_info.dump()
@@ -304,7 +304,7 @@ class TgzArtifactory:
             expected_checksum == actual_checksum
         ), f"[{actual_checksum} != {expected_checksum}]"
         Shell.check("rm /tmp/tmp.tgz*", verbose=True)
-        self.release_info.tgz_command = cmd
+        self.release_info.tgz = cmd
         self.release_info.dump()

View File

@@ -127,15 +127,13 @@ def _prepare(token):
             )
             commit_num -= 1
-            is_completed = CI.GHActions.check_wf_completed(
-                token=token, commit_sha=commit
-            )
+            is_completed = CI.GH.check_wf_completed(token=token, commit_sha=commit)
             if not is_completed:
                 print(f"CI is in progress for [{commit}] - check previous commit")
                 commits_to_branch_head += 1
                 continue
-            commit_ci_status = CI.GHActions.get_commit_status_by_name(
+            commit_ci_status = CI.GH.get_commit_status_by_name(
                 token=token,
                 commit_sha=commit,
                 status_name=(CI.JobNames.BUILD_CHECK, "ClickHouse build check"),

View File

@@ -16,7 +16,7 @@ import upload_result_helper
 from build_check import get_release_or_pr
 from ci_config import CI
 from ci_metadata import CiMetadata
-from ci_utils import GHActions, normalize_string, Utils
+from ci_utils import GH, normalize_string, Utils
 from clickhouse_helper import (
     CiLogsCredentials,
     ClickHouseHelper,
@@ -368,7 +368,7 @@ def _pre_action(s3, job_name, batch, indata, pr_info):
         )
         to_be_skipped = True
         # skip_status = SUCCESS already there
-        GHActions.print_in_group("Commit Status Data", job_status)
+        GH.print_in_group("Commit Status Data", job_status)
     # create pre report
     jr = JobReport.create_pre_report(status=skip_status, job_skipped=to_be_skipped)

View File

@@ -8,7 +8,7 @@ import requests
 from botocore.exceptions import ClientError
 from pr_info import PRInfo
-from ci_utils import Shell, GHActions
+from ci_config import CI
 class CIBuddy:
@@ -31,10 +31,19 @@ class CIBuddy:
         self.sha = pr_info.sha[:10]
     def check_workflow(self):
-        GHActions.print_workflow_results()
-        res = GHActions.get_workflow_job_result(GHActions.ActionsNames.RunConfig)
-        if res != GHActions.ActionStatuses.SUCCESS:
-            self.post_job_error("Workflow Configuration Failed", critical=True)
+        CI.GH.print_workflow_results()
+        if CI.Envs.GITHUB_WORKFLOW == CI.WorkFlowNames.CreateRelease:
+            if not CI.GH.is_workflow_ok():
+                self.post_job_error(
+                    f"{CI.Envs.GITHUB_WORKFLOW} Workflow Failed", critical=True
+                )
+        else:
+            res = CI.GH.get_workflow_job_result(CI.GH.ActionsNames.RunConfig)
+            if res != CI.GH.ActionStatuses.SUCCESS:
+                print(f"ERROR: RunConfig status is [{res}] - post report to slack")
+                self.post_job_error(
+                    f"{CI.Envs.GITHUB_WORKFLOW} Workflow Failed", critical=True
+                )
     @staticmethod
     def _get_webhooks():
@@ -74,10 +83,13 @@ class CIBuddy:
         message = title
         if isinstance(body, dict):
             for name, value in body.items():
-                if "commit_sha" in name:
+                if "sha" in name and value and len(value) == 40:
                     value = (
                         f"<https://github.com/{self.repo}/commit/{value}|{value[:8]}>"
                     )
+                elif isinstance(value, str) and value.startswith("https://github.com/"):
+                    value_shorten = value.split("/")[-1]
+                    value = f"<{value}|{value_shorten}>"
                 message += f" *{name}*: {value}\n"
         else:
             message += body + "\n"
@@ -120,9 +132,11 @@ class CIBuddy:
     ) -> None:
         instance_id, instance_type = "unknown", "unknown"
         if with_instance_info:
-            instance_id = Shell.get_output("ec2metadata --instance-id") or instance_id
+            instance_id = (
+                CI.Shell.get_output("ec2metadata --instance-id") or instance_id
+            )
             instance_type = (
-                Shell.get_output("ec2metadata --instance-type") or instance_type
+                CI.Shell.get_output("ec2metadata --instance-type") or instance_type
            )
         if not job_name:
             job_name = os.getenv("CHECK_NAME", "unknown")
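
A worked example (all values made up) of the message formatting above: a 40-character sha becomes a commit link, GitHub URLs are shortened to their last path segment, other values pass through verbatim:

repo = "ClickHouse/ClickHouse"  # assumption: self.repo holds "owner/name"
body = {
    "commit_sha": "0123456789abcdef0123456789abcdef01234567",  # placeholder sha
    "version_bump_pr": "https://github.com/ClickHouse/ClickHouse/pull/1",  # placeholder URL
    "release_progress": "docker server release",
}
message = "Title\n"
for name, value in body.items():
    if "sha" in name and value and len(value) == 40:
        value = f"<https://github.com/{repo}/commit/{value}|{value[:8]}>"
    elif isinstance(value, str) and value.startswith("https://github.com/"):
        value = f"<{value}|{value.split('/')[-1]}>"
    message += f" *{name}*: {value}\n"
print(message)
# -> commit_sha rendered as <.../commit/0123...4567|01234567>, the PR URL as <...|1>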

View File

@@ -7,7 +7,7 @@ from typing import Dict, Optional, Any, Union, Sequence, List, Set
 from ci_config import CI
-from ci_utils import is_hex, GHActions
+from ci_utils import is_hex, GH
 from commit_status_helper import CommitStatusData
 from env_helper import (
     TEMP_PATH,
@@ -258,15 +258,15 @@ class CiCache:
     def print_status(self):
         print(f"Cache enabled: [{self.enabled}]")
         for record_type in self.RecordType:
-            GHActions.print_in_group(
+            GH.print_in_group(
                 f"Cache records: [{record_type}]", list(self.records[record_type])
             )
-        GHActions.print_in_group(
+        GH.print_in_group(
             "Jobs to do:",
             list(self.jobs_to_do.items()),
         )
-        GHActions.print_in_group("Jobs to skip:", self.jobs_to_skip)
-        GHActions.print_in_group(
+        GH.print_in_group("Jobs to skip:", self.jobs_to_skip)
+        GH.print_in_group(
             "Jobs to wait:",
             list(self.jobs_to_wait.items()),
         )
@@ -788,7 +788,7 @@ class CiCache:
         while round_cnt < MAX_ROUNDS_TO_WAIT:
             round_cnt += 1
-            GHActions.print_in_group(
+            GH.print_in_group(
                 f"Wait pending jobs, round [{round_cnt}/{MAX_ROUNDS_TO_WAIT}]:",
                 list(self.jobs_to_wait),
             )
@@ -853,7 +853,7 @@ class CiCache:
             # make up for 2 iterations in dry_run
             expired_sec += int(TIMEOUT / 2) + 1
-            GHActions.print_in_group(
+            GH.print_in_group(
                 "Remaining jobs:",
                 [list(self.jobs_to_wait)],
             )

View File

@@ -34,7 +34,8 @@ class CI:
     from ci_definitions import Runners as Runners
     from ci_utils import Envs as Envs
     from ci_utils import Utils as Utils
-    from ci_utils import GHActions as GHActions
+    from ci_utils import GH as GH
+    from ci_utils import Shell as Shell
     from ci_definitions import Labels as Labels
     from ci_definitions import TRUSTED_CONTRIBUTORS as TRUSTED_CONTRIBUTORS
     from ci_definitions import WorkFlowNames as WorkFlowNames

View File

@@ -112,6 +112,7 @@ class WorkFlowNames(metaclass=WithIter):
     """
     JEPSEN = "JepsenWorkflow"
+    CreateRelease = "CreateRelease"
 class BuildNames(metaclass=WithIter):
@@ -578,7 +579,7 @@ class CommonJobConfigs:
     DOCKER_SERVER = JobConfig(
         job_name_keyword="docker",
         required_on_release_branch=True,
-        run_command='docker_server.py --check-name "$CHECK_NAME" --release-type head --allow-build-reuse',
+        run_command='docker_server.py --check-name "$CHECK_NAME" --tag-type head --allow-build-reuse',
         digest=DigestConfig(
             include_paths=[
                 "tests/ci/docker_server.py",

View File

@@ -9,7 +9,7 @@ from env_helper import (
     S3_BUILDS_BUCKET_PUBLIC,
 )
 from s3_helper import S3Helper
-from ci_utils import GHActions
+from ci_utils import GH
 from synchronizer_utils import SYNC_BRANCH_PREFIX
@@ -111,7 +111,7 @@ class CiMetadata:
         else:
             log_title = f"Storing workflow metadata: PR [{self.pr_number}], upstream PR [{self.upstream_pr_number}]"
-        GHActions.print_in_group(
+        GH.print_in_group(
             log_title,
             [f"run_id: {self.run_id}"],
         )

View File

@@ -16,6 +16,8 @@ class Envs:
     WORKFLOW_RESULT_FILE = os.getenv(
         "WORKFLOW_RESULT_FILE", "/tmp/workflow_results.json"
     )
+    S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds")
+    GITHUB_WORKFLOW = os.getenv("GITHUB_WORKFLOW", "")
 LABEL_CATEGORIES = {
@@ -83,7 +85,7 @@ def normalize_string(string: str) -> str:
     return res
-class GHActions:
+class GH:
     class ActionsNames:
         RunConfig = "RunConfig"
@@ -117,6 +119,14 @@ class GHActions:
         results = [f"{job}: {data['result']}" for job, data in res.items()]
         cls.print_in_group("Workflow results", results)
+    @classmethod
+    def is_workflow_ok(cls) -> bool:
+        res = cls._get_workflow_results()
+        for _job, data in res.items():
+            if data["result"] == "failure":
+                return False
+        return bool(res)
     @classmethod
     def get_workflow_job_result(cls, wf_job_name: str) -> Optional[str]:
         res = cls._get_workflow_results()
@@ -189,15 +199,25 @@ class GHActions:
         return False
     @staticmethod
-    def get_pr_url_by_branch(repo, branch):
-        get_url_cmd = (
-            f"gh pr list --repo {repo} --head {branch} --json url --jq '.[0].url'"
-        )
+    def get_pr_url_by_branch(branch, repo=None):
+        repo = repo or Envs.GITHUB_REPOSITORY
+        get_url_cmd = f"gh pr list --repo {repo} --head {branch} --json url --jq '.[0].url' --state open"
         url = Shell.get_output(get_url_cmd)
+        if not url:
+            print(f"WARNING: No open PR found, branch [{branch}] - search for merged")
+            get_url_cmd = f"gh pr list --repo {repo} --head {branch} --json url --jq '.[0].url' --state merged"
+            url = Shell.get_output(get_url_cmd)
         if not url:
             print(f"ERROR: PR nor found, branch [{branch}]")
         return url
+    @staticmethod
+    def is_latest_release_branch(branch):
+        latest_branch = Shell.get_output(
+            'gh pr list --label release --repo ClickHouse/ClickHouse --search "sort:created" -L1 --json headRefName'
+        )
+        return latest_branch == branch
 class Shell:
     @classmethod
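
is_workflow_ok() builds on an existing private helper that is not part of this diff. A hypothetical sketch of what that helper does, assuming it simply loads the JSON written by the new check_workflow action:

import json
from typing import Any, Dict


def _get_workflow_results_sketch(path: str = "/tmp/workflow_results.json") -> Dict[str, Any]:
    # Hypothetical stand-in for GH._get_workflow_results(): load the
    # {job: {"result": ...}} mapping dumped from ${{ toJson(needs) }}.
    try:
        with open(path, "r", encoding="utf-8") as file:
            return json.load(file)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        print(f"ERROR: Failed to read workflow results: {e}")
        return {}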

View File

@@ -10,9 +10,8 @@ from typing import Iterator, List
 from git_helper import Git, GIT_PREFIX
 from ssh import SSHAgent
-from env_helper import GITHUB_REPOSITORY, S3_BUILDS_BUCKET
 from s3_helper import S3Helper
-from ci_utils import Shell, GHActions
+from ci_utils import Shell, GH
 from ci_buddy import CIBuddy
 from version_helper import (
     FILE_WITH_VERSION_PATH,
@@ -69,13 +68,14 @@ class ReleaseContextManager:
                 previous_release_tag="NA",
                 previous_release_sha="NA",
                 release_progress=ReleaseProgress.STARTED,
+                latest=False,
             ).dump()
         else:
             # fetch release info from fs and update
             self.release_info = ReleaseInfo.from_file()
             assert self.release_info
             assert (
-                self.release_info.progress_description == ReleaseProgressDescription.OK
+                self.release_info.progress_status == ReleaseProgressDescription.OK
             ), "Must be OK on the start of new context"
             self.release_info.release_progress = self.release_progress
             self.release_info.dump()
@@ -84,9 +84,9 @@ class ReleaseContextManager:
     def __exit__(self, exc_type, exc_value, traceback):
         assert self.release_info
         if exc_type is not None:
-            self.release_info.progress_description = ReleaseProgressDescription.FAILED
+            self.release_info.progress_status = ReleaseProgressDescription.FAILED
         else:
-            self.release_info.progress_description = ReleaseProgressDescription.OK
+            self.release_info.progress_status = ReleaseProgressDescription.OK
         self.release_info.dump()
@@ -96,6 +96,7 @@ class ReleaseInfo:
     release_tag: str
     release_branch: str
     commit_sha: str
+    latest: bool
     # lts or stable
     codename: str
     previous_release_tag: str
@@ -104,12 +105,12 @@ class ReleaseInfo:
     version_bump_pr: str = ""
     prs_merged: bool = False
     release_url: str = ""
-    debian_command: str = ""
-    rpm_command: str = ""
-    tgz_command: str = ""
-    docker_command: str = ""
+    debian: str = ""
+    rpm: str = ""
+    tgz: str = ""
+    docker: str = ""
     release_progress: str = ""
-    progress_description: str = ""
+    progress_status: str = ""
     def is_patch(self):
         return self.release_branch != "master"
@@ -129,12 +130,15 @@ class ReleaseInfo:
             print(json.dumps(dataclasses.asdict(self), indent=2), file=f)
         return self
-    def prepare(self, commit_ref: str, release_type: str) -> "ReleaseInfo":
+    def prepare(
+        self, commit_ref: str, release_type: str, skip_tag_check: bool
+    ) -> "ReleaseInfo":
         version = None
         release_branch = None
         release_tag = None
         previous_release_tag = None
         previous_release_sha = None
+        latest_release = False
         codename = ""
         assert release_type in ("patch", "new")
         if release_type == "new":
@@ -145,7 +149,7 @@ class ReleaseInfo:
                 verbose=True,
             )
             with checkout(commit_ref):
-                commit_sha = Shell.get_output_or_raise(f"git rev-parse {commit_ref}")
+                commit_sha = Shell.get_output_or_raise(f"git rev-list -n1 {commit_ref}")
                 # Git() must be inside "with checkout" contextmanager
                 git = Git()
                 version = get_version_from_repo(git=git)
@@ -158,12 +162,12 @@ class ReleaseInfo:
             release_tag = version.describe
             previous_release_tag = expected_prev_tag
             previous_release_sha = Shell.get_output_or_raise(
-                f"git rev-parse {previous_release_tag}"
+                f"git rev-list -n1 {previous_release_tag}"
             )
             assert previous_release_sha
         if release_type == "patch":
             with checkout(commit_ref):
-                commit_sha = Shell.get_output_or_raise(f"git rev-parse {commit_ref}")
+                commit_sha = Shell.get_output_or_raise(f"git rev-list -n1 {commit_ref}")
                 # Git() must be inside "with checkout" contextmanager
                 git = Git()
                 version = get_version_from_repo(git=git)
@@ -200,16 +204,20 @@ class ReleaseInfo:
                 expected_tag_prefix
             ) and git.latest_tag.endswith(expected_tag_suffix):
                 pass
-            else:
+            elif not skip_tag_check:
                 assert (
                     False
-                ), f"BUG: Unexpected latest tag [{git.latest_tag}] expected [{expected_tag_prefix}*{expected_tag_suffix}]"
+                ), f"BUG: Unexpected latest tag [{git.latest_tag}] expected [{expected_tag_prefix}*{expected_tag_suffix}]. Already Released?"
             previous_release_sha = Shell.get_output_or_raise(
-                f"git rev-parse {previous_release_tag}"
+                f"git rev-list -n1 {previous_release_tag}"
             )
             assert previous_release_sha
+            if CI.GH.is_latest_release_branch(release_branch):
+                print("This is going to be the latest release!")
+                latest_release = True
         assert (
             release_branch
             and previous_release_tag
@@ -218,7 +226,7 @@ class ReleaseInfo:
             and release_tag
             and version
            and (codename in ("lts", "stable") or release_type == "new")
-        )
+        ), f"Check: {release_branch}, {previous_release_tag}, {previous_release_sha}, {commit_sha}, {release_tag}, {version}"
         self.release_branch = release_branch
         self.commit_sha = commit_sha
@@ -228,7 +236,8 @@ class ReleaseInfo:
         self.previous_release_tag = previous_release_tag
         self.previous_release_sha = previous_release_sha
         self.release_progress = ReleaseProgress.STARTED
-        self.progress_description = ReleaseProgressDescription.OK
+        self.progress_status = ReleaseProgressDescription.OK
+        self.latest = latest_release
         return self
     def push_release_tag(self, dry_run: bool) -> None:
@@ -252,7 +261,7 @@ class ReleaseInfo:
     @staticmethod
     def _create_gh_label(label: str, color_hex: str, dry_run: bool) -> None:
-        cmd = f"gh api repos/{GITHUB_REPOSITORY}/labels -f name={label} -f color={color_hex}"
+        cmd = f"gh api repos/{CI.Envs.GITHUB_REPOSITORY}/labels -f name={label} -f color={color_hex}"
         Shell.check(cmd, dry_run=dry_run, strict=True)
     def push_new_release_branch(self, dry_run: bool) -> None:
@@ -294,7 +303,7 @@ class ReleaseInfo:
             f"v{new_release_branch}-affected", "c2bfff", dry_run=dry_run
         )
         Shell.check(
-            f"""gh pr create --repo {GITHUB_REPOSITORY} --title 'Release pull request for branch {new_release_branch}'
+            f"""gh pr create --repo {CI.Envs.GITHUB_REPOSITORY} --title 'Release pull request for branch {new_release_branch}'
             --head {new_release_branch} {pr_labels}
             --body 'This PullRequest is a part of ClickHouse release cycle. It is used by CI system only. Do not perform any changes with it.'
             """,
@@ -303,9 +312,12 @@ class ReleaseInfo:
             verbose=True,
         )
+    def get_version_bump_branch(self):
+        return f"bump_version_{self.version}"
     def update_version_and_contributors_list(self, dry_run: bool) -> None:
         # Bump version, update contributors list, create PR
-        branch_upd_version_contributors = f"bump_version_{self.version}"
+        branch_upd_version_contributors = self.get_version_bump_branch()
         with checkout(self.commit_sha):
             git = Git()
             version = get_version_from_repo(git=git)
@@ -323,9 +335,9 @@ class ReleaseInfo:
             update_contributors(raise_error=True)
             cmd_commit_version_upd = f"{GIT_PREFIX} commit '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}' -m 'Update autogenerated version to {self.version} and contributors'"
             cmd_push_branch = f"{GIT_PREFIX} push --set-upstream origin {branch_upd_version_contributors}"
-            body_file = get_abs_path(".github/PULL_REQUEST_TEMPLATE.md")
             actor = os.getenv("GITHUB_ACTOR", "") or "me"
-            cmd_create_pr = f"gh pr create --repo {GITHUB_REPOSITORY} --title 'Update version after release' --head {branch_upd_version_contributors} --base {self.release_branch} --body-file {body_file} --label 'do not test' --assignee {actor}"
+            body = f"Automatic version bump after release {self.release_tag}\n### Changelog category (leave one):\n- Not for changelog (changelog entry is not required)\n"
+            cmd_create_pr = f"gh pr create --repo {CI.Envs.GITHUB_REPOSITORY} --title 'Update version after release' --head {branch_upd_version_contributors} --base {self.release_branch} --body \"{body}\" --assignee {actor}"
             Shell.check(
                 cmd_commit_version_upd, strict=True, dry_run=dry_run, verbose=True
             )
@@ -342,30 +354,42 @@ class ReleaseInfo:
                 )
                 self.version_bump_pr = "dry-run"
             else:
-                self.version_bump_pr = GHActions.get_pr_url_by_branch(
-                    repo=GITHUB_REPOSITORY, branch=branch_upd_version_contributors
+                self.version_bump_pr = GH.get_pr_url_by_branch(
+                    branch=branch_upd_version_contributors
                 )
+    def get_change_log_branch(self):
+        return f"auto/{self.release_tag}"
     def update_release_info(self, dry_run: bool) -> "ReleaseInfo":
         if self.release_branch != "master":
-            branch = f"auto/{release_info.release_tag}"
-            if not dry_run:
-                url = GHActions.get_pr_url_by_branch(
-                    repo=GITHUB_REPOSITORY, branch=branch
-                )
-            else:
-                url = "dry-run"
-            print(f"ChangeLog PR url [{url}]")
-            self.changelog_pr = url
-            print(f"Release url [{url}]")
-            self.release_url = f"https://github.com/{GITHUB_REPOSITORY}/releases/tag/{self.release_tag}"
-            if self.release_progress == ReleaseProgress.COMPLETED:
-                self.docker_command = f"docker run --rm clickhouse/clickhouse:{self.version} clickhouse --version"
+            if not self.changelog_pr:
+                branch = self.get_change_log_branch()
+                if not dry_run:
+                    url = GH.get_pr_url_by_branch(branch=branch)
+                else:
+                    url = "dry-run"
+                print(f"ChangeLog PR url [{url}]")
+                self.changelog_pr = url
+            if not self.version_bump_pr:
+                branch = self.get_version_bump_branch()
+                if not dry_run:
+                    url = GH.get_pr_url_by_branch(branch=branch)
+                else:
+                    url = "dry-run"
+                print(f"Version bump PR url [{url}]")
+                self.version_bump_pr = url
+            self.release_url = f"https://github.com/{CI.Envs.GITHUB_REPOSITORY}/releases/tag/{self.release_tag}"
+            print(f"Release url [{self.release_url}]")
+            self.docker = f"docker run --rm clickhouse/clickhouse:{self.version} clickhouse --version"
         self.dump()
         return self
     def create_gh_release(self, packages_files: List[str], dry_run: bool) -> None:
-        repo = os.getenv("GITHUB_REPOSITORY")
+        repo = CI.Envs.GITHUB_REPOSITORY
         assert repo
         cmds = [
             f"gh release create --repo {repo} --title 'Release {self.release_tag}' {self.release_tag}"
@@ -375,7 +399,9 @@ class ReleaseInfo:
         if not dry_run:
             for cmd in cmds:
                 Shell.check(cmd, strict=True, verbose=True)
-            self.release_url = f"https://github.com/{GITHUB_REPOSITORY}/releases/tag/{self.release_tag}"
+            self.release_url = (
+                f"https://github.com/{repo}/releases/tag/{self.release_tag}"
+            )
         else:
             print("Dry-run, would run commands:")
             print("\n * ".join(cmds))
@@ -536,7 +562,7 @@ class PackageDownloader:
             ]
         )
         self.s3.download_file(
-            bucket=S3_BUILDS_BUCKET,
+            bucket=CI.Envs.S3_BUILDS_BUCKET,
             s3_path=s3_path,
             local_file_path="/".join([self.LOCAL_DIR, package_file]),
         )
@@ -557,7 +583,7 @@ class PackageDownloader:
             ]
         )
         self.s3.download_file(
-            bucket=S3_BUILDS_BUCKET,
+            bucket=CI.Envs.S3_BUILDS_BUCKET,
            s3_path=s3_path,
             local_file_path="/".join([self.LOCAL_DIR, destination_binary_name]),
         )
@@ -636,6 +662,11 @@ def parse_args() -> argparse.Namespace:
         action="store_true",
         help="Initial step to prepare info like release branch, release tag, etc.",
     )
+    parser.add_argument(
+        "--skip-tag-check",
+        action="store_true",
+        help="To skip check against latest git tag on a release branch",
+    )
     parser.add_argument(
         "--push-release-tag",
         action="store_true",
@@ -725,7 +756,11 @@ if __name__ == "__main__":
         assert (
             args.ref and args.release_type
         ), "--ref and --release-type must be provided with --prepare-release-info"
-        release_info.prepare(commit_ref=args.ref, release_type=args.release_type)
+        release_info.prepare(
+            commit_ref=args.ref,
+            release_type=args.release_type,
+            skip_tag_check=args.skip_tag_check,
+        )
     if args.download_packages:
         with ReleaseContextManager(
@@ -776,7 +811,7 @@ if __name__ == "__main__":
         else:
             title = "New release"
         if (
-            release_info.progress_description == ReleaseProgressDescription.OK
+            release_info.progress_status == ReleaseProgressDescription.OK
             and release_info.release_progress == ReleaseProgress.COMPLETED
         ):
             title = "Completed: " + title
@@ -792,16 +827,16 @@ if __name__ == "__main__":
     if args.set_progress_started:
         ri = ReleaseInfo.from_file()
         ri.release_progress = args.progress
-        ri.progress_description = ReleaseProgressDescription.FAILED
+        ri.progress_status = ReleaseProgressDescription.FAILED
         ri.dump()
         assert args.progress, "Progress step name must be provided"
     if args.set_progress_completed:
         ri = ReleaseInfo.from_file()
         assert (
-            ri.progress_description == ReleaseProgressDescription.FAILED
+            ri.progress_status == ReleaseProgressDescription.FAILED
         ), "Must be FAILED before set to OK"
-        ri.progress_description = ReleaseProgressDescription.OK
+        ri.progress_status = ReleaseProgressDescription.OK
         ri.dump()
     if args.merge_prs:
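
For orientation, an illustrative (non-exhaustive) sketch of what ReleaseInfo.dump() writes to /tmp/release_info.json; the release workflow's jq calls read release_tag, commit_sha and latest from this file. Field names follow the dataclass above, all values are placeholders:

example_release_info = {
    "release_branch": "22.2",
    "release_tag": "v22.2.2.2-stable",
    "commit_sha": "<full 40-character sha>",
    "latest": True,
    "codename": "stable",
    "previous_release_tag": "v22.2.1.1-stable",
    "previous_release_sha": "<full 40-character sha>",
    "changelog_pr": "",
    "version_bump_pr": "",
    "release_url": "",
    "debian": "",
    "rpm": "",
    "tgz": "",
    "docker": "",
    "release_progress": "<ReleaseProgress value>",
    "progress_status": "<ReleaseProgressDescription value>",
}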

View File

@@ -69,13 +69,14 @@ def parse_args() -> argparse.Namespace:
         help="sha of the commit to use packages from",
     )
     parser.add_argument(
-        "--release-type",
+        "--tag-type",
         type=str,
-        choices=("auto", "latest", "major", "minor", "patch", "head"),
+        choices=("head", "release", "latest-release"),
         default="head",
-        help="version part that will be updated when '--version' is set; "
-        "'auto' is a special case, it will get versions from github and detect the "
-        "release type (latest, major, minor or patch) automatically",
+        help="defines required tags for resulting docker image. "
+        "head - for master image (tag: head) "
+        "release - for release image (tags: XX, XX.XX, XX.XX.XX, XX.XX.XX.XX) "
+        "release-latest - for latest release image (tags: XX, XX.XX, XX.XX.XX, XX.XX.XX.XX, latest) ",
     )
     parser.add_argument(
         "--image-path",
@@ -149,74 +150,35 @@ def retry_popen(cmd: str, log_file: Path) -> int:
     return retcode
-def auto_release_type(version: ClickHouseVersion, release_type: str) -> str:
-    if release_type != "auto":
-        return release_type
-    git_versions = get_tagged_versions()
-    reference_version = git_versions[0]
-    for i in reversed(range(len(git_versions))):
-        if git_versions[i] <= version:
-            if i == len(git_versions) - 1:
-                return "latest"
-            reference_version = git_versions[i + 1]
-            break
-    if version.major < reference_version.major:
-        return "major"
-    if version.minor < reference_version.minor:
-        return "minor"
-    if version.patch < reference_version.patch:
-        return "patch"
-    raise ValueError(
-        "Release type 'tweak' is not supported for "
-        f"{version.string} < {reference_version.string}"
-    )
-def gen_tags(version: ClickHouseVersion, release_type: str) -> List[str]:
+def gen_tags(version: ClickHouseVersion, tag_type: str) -> List[str]:
     """
-    22.2.2.2 + latest:
+    @tag_type release-latest, @version 22.2.2.2:
     - latest
     - 22
     - 22.2
     - 22.2.2
     - 22.2.2.2
-    22.2.2.2 + major:
+    @tag_type release, @version 22.2.2.2:
     - 22
     - 22.2
     - 22.2.2
     - 22.2.2.2
-    22.2.2.2 + minor:
-    - 22.2
-    - 22.2.2
-    - 22.2.2.2
-    22.2.2.2 + patch:
-    - 22.2.2
-    - 22.2.2.2
-    22.2.2.2 + head:
+    @tag_type head:
     - head
     """
     parts = version.string.split(".")
     tags = []
-    if release_type == "latest":
-        tags.append(release_type)
+    if tag_type == "release-latest":
+        tags.append("latest")
         for i in range(len(parts)):
             tags.append(".".join(parts[: i + 1]))
-    elif release_type == "major":
+    elif tag_type == "head":
+        tags.append(tag_type)
+    elif tag_type == "release":
         for i in range(len(parts)):
             tags.append(".".join(parts[: i + 1]))
-    elif release_type == "minor":
-        for i in range(1, len(parts)):
-            tags.append(".".join(parts[: i + 1]))
-    elif release_type == "patch":
-        for i in range(2, len(parts)):
-            tags.append(".".join(parts[: i + 1]))
-    elif release_type == "head":
-        tags.append(release_type)
     else:
-        raise ValueError(f"{release_type} is not valid release part")
+        assert False, f"Invalid release type [{tag_type}]"
     return tags
@@ -370,8 +332,7 @@ def main():
         push = True
     image = DockerImageData(image_path, image_repo, False)
-    args.release_type = auto_release_type(args.version, args.release_type)
-    tags = gen_tags(args.version, args.release_type)
+    tags = gen_tags(args.version, args.tag_type)
     repo_urls = {}
     direct_urls: Dict[str, List[str]] = {}

View File

@@ -1,61 +1,19 @@
 #!/usr/bin/env python
 import unittest
-from unittest.mock import patch, MagicMock
 from version_helper import get_version_from_string
 import docker_server as ds
-# di.logging.basicConfig(level=di.logging.INFO)
 class TestDockerServer(unittest.TestCase):
     def test_gen_tags(self):
         version = get_version_from_string("22.2.2.2")
         cases = (
-            ("latest", ["latest", "22", "22.2", "22.2.2", "22.2.2.2"]),
-            ("major", ["22", "22.2", "22.2.2", "22.2.2.2"]),
-            ("minor", ["22.2", "22.2.2", "22.2.2.2"]),
-            ("patch", ["22.2.2", "22.2.2.2"]),
+            ("release-latest", ["latest", "22", "22.2", "22.2.2", "22.2.2.2"]),
+            ("release", ["22", "22.2", "22.2.2", "22.2.2.2"]),
             ("head", ["head"]),
         )
         for case in cases:
             release_type = case[0]
             self.assertEqual(case[1], ds.gen_tags(version, release_type))
-        with self.assertRaises(ValueError):
-            ds.gen_tags(version, "auto")
-    @patch("docker_server.get_tagged_versions")
-    def test_auto_release_type(self, mock_tagged_versions: MagicMock) -> None:
-        mock_tagged_versions.return_value = [
-            get_version_from_string("1.1.1.1"),
-            get_version_from_string("1.2.1.1"),
-            get_version_from_string("2.1.1.1"),
-            get_version_from_string("2.2.1.1"),
-            get_version_from_string("2.2.2.1"),
-        ]
-        cases_less = (
-            (get_version_from_string("1.0.1.1"), "minor"),
-            (get_version_from_string("1.1.2.1"), "minor"),
-            (get_version_from_string("1.3.1.1"), "major"),
-            (get_version_from_string("2.1.2.1"), "minor"),
-            (get_version_from_string("2.2.1.3"), "patch"),
-            (get_version_from_string("2.2.3.1"), "latest"),
-            (get_version_from_string("2.3.1.1"), "latest"),
-        )
-        for case in cases_less:
-            release = ds.auto_release_type(case[0], "auto")
-            self.assertEqual(case[1], release)
-        cases_equal = (
-            (get_version_from_string("1.1.1.1"), "minor"),
-            (get_version_from_string("1.2.1.1"), "major"),
-            (get_version_from_string("2.1.1.1"), "minor"),
-            (get_version_from_string("2.2.1.1"), "patch"),
-            (get_version_from_string("2.2.2.1"), "latest"),
-        )
-        for case in cases_equal:
-            release = ds.auto_release_type(case[0], "auto")
-            self.assertEqual(case[1], release)