#!/usr/bin/env python3
"""
A plan:
- TODO: consider receiving GH objects cache from S3, but it's really only a few
  requests to the API currently
- Get all open release PRs (20.10, 21.8, 22.5, etc.)
- Get all pull-requests between the date of the merge-base for the oldest PR with
  labels pr-must-backport and version-specific v21.8-must-backport, but without
  pr-backported
- Iterate over the received PRs:
    - for pr-must-backport:
        - check if all backport-PRs are created. If yes,
          set pr-backported label and finish
        - If not, create either cherry-pick PRs or merge the cherry-pick (in the same
          stage, if mergeable) and create backport-PRs
        - If successful, set pr-backported label on the PR

    - for version-specific labels:
        - the same: check, cherry-pick, backport, pr-backported

Cherry-pick stage:

- From time to time the cherry-pick fails, if it was done manually. In that
  case we check if it's even needed, and mark the release as done somehow.
"""

import argparse
import logging
import os
from contextlib import contextmanager
from datetime import date, datetime, timedelta
from pathlib import Path
from subprocess import CalledProcessError
from typing import List, Optional

import __main__

from env_helper import TEMP_PATH
from get_robot_token import get_best_robot_token
from git_helper import GIT_PREFIX, git_runner, is_shallow
from github_helper import GitHub, PullRequest, PullRequests, Repository
from lambda_shared_package.lambda_shared.pr import Labels
from ssh import SSHKey


class ReleaseBranch:
    CHERRYPICK_DESCRIPTION = """Original pull-request #{pr_number}

This pull-request is the first step of an automated backporting.
It contains changes similar to calling `git cherry-pick` locally.
If you intend to continue backporting the changes, then resolve all conflicts if any.
Otherwise, if you do not want to backport them, then just close this pull-request.

The check results do not matter at this step - you can safely ignore them.

### Note

This pull-request will be merged automatically. Please, **do not merge it manually** \
(but if you accidentally did, nothing bad will happen).

### Troubleshooting

#### If the PR was manually reopened after being closed

If this PR is stuck (i.e. not automatically merged after one day), check {pr_url} for \
the `{backport_created_label}` *label* and delete it.

Manually merging will do nothing. The `{backport_created_label}` *label* prevents the \
original PR {pr_url} from being processed.

#### If the conflicts were resolved in a wrong way

If this cherry-pick PR is completely broken by a wrong conflict resolution, and you \
want to recreate it:

- delete the `{label_cherrypick}` label from the PR
- delete this branch from the repository

You also need to check the original PR {pr_url} for `{backport_created_label}`, and \
delete it if it's present there
"""
    BACKPORT_DESCRIPTION = """This pull-request is the last step of an automated \
backporting.
Treat it as a standard pull-request: look at the checks and resolve conflicts.
Merge it only if you intend to backport changes to the target branch, otherwise just \
close it.
"""
    REMOTE = ""

    def __init__(
        self,
        name: str,
        pr: PullRequest,
        repo: Repository,
        backport_created_label: str,
    ):
        self.name = name
        self.pr = pr
        self.repo = repo

        self.cherrypick_branch = f"cherrypick/{name}/{pr.merge_commit_sha}"
        self.backport_branch = f"backport/{name}/{pr.number}"
        self.cherrypick_pr = None  # type: Optional[PullRequest]
        self.backport_pr = None  # type: Optional[PullRequest]
        self._backported = False

        self.backport_created_label = backport_created_label

        self.pre_check()

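    # `git branch -a --contains=<sha> <remote>/<branch>` prints the branch name only
    # when the merged commit is already reachable from the release branch, so any
    # non-empty output means this branch needs no further backporting work.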
    def pre_check(self):
        branch_updated = git_runner(
            f"git branch -a --contains={self.pr.merge_commit_sha} "
            f"{self.REMOTE}/{self.name}"
        )
        if branch_updated:
            self._backported = True

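    # Head refs follow the naming from __init__, e.g. "cherrypick/23.8/<merge-sha>"
    # and "backport/23.8/<pr-number>" (the version number here is illustrative),
    # which is how the search results are attributed to a concrete ReleaseBranch.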
    def pop_prs(self, prs: PullRequests) -> PullRequests:
        """The method processes all prs and pops the ReleaseBranch-related prs"""
        to_pop = []  # type: List[int]
        for i, pr in enumerate(prs):
            if self.name not in pr.head.ref:
                # this pr is not for the current branch
                continue
            if pr.head.ref.startswith(f"cherrypick/{self.name}"):
                self.cherrypick_pr = pr
                to_pop.append(i)
            elif pr.head.ref.startswith(f"backport/{self.name}"):
                self.backport_pr = pr
                self._backported = True
                to_pop.append(i)
            else:
                assert False, f"BUG! Invalid PR's branch [{pr.head.ref}]"

        for i in reversed(to_pop):
            # Go from the tail to keep the order and pop the greater index first
            prs.pop(i)
        return prs

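    # The processing is a small state machine: no cherry-pick PR yet -> create one;
    # the cherry-pick PR is mergeable -> merge it and open the backport PR; it was
    # closed -> the backport is considered discarded; it has conflicts -> ping the
    # assignees and wait for the next run.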
    def process(  # pylint: disable=too-many-return-statements
        self, dry_run: bool
    ) -> None:
        if self.backported:
            return

        if not self.cherrypick_pr:
            if dry_run:
                logging.info(
                    "DRY RUN: Would create cherrypick PR for #%s", self.pr.number
                )
                return
            self.create_cherrypick()
        assert self.cherrypick_pr, "BUG!"

        if self.cherrypick_pr.mergeable and self.cherrypick_pr.state != "closed":
            if dry_run:
                logging.info(
                    "DRY RUN: Would merge cherry-pick PR for #%s", self.pr.number
                )
                return
            self.cherrypick_pr.merge()
            # The PR needs an update, since PR.merge doesn't update the object
            self.cherrypick_pr.update()
            if self.cherrypick_pr.merged:
                if dry_run:
                    logging.info(
                        "DRY RUN: Would create backport PR for #%s", self.pr.number
                    )
                    return
                self.create_backport()
                return
        if self.cherrypick_pr.state == "closed":
            logging.info(
                "The cherry-pick PR #%s for PR #%s is discarded",
                self.cherrypick_pr.number,
                self.pr.number,
            )
            self._backported = True
            return
        logging.info(
            "Cherry-pick PR #%s for PR #%s has conflicts and cannot be merged",
            self.cherrypick_pr.number,
            self.pr.number,
        )
        self.ping_cherry_pick_assignees(dry_run)

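    # Branches created here for, e.g., PR #12345 merged into 23.8 (numbers are
    # illustrative): "backport/23.8/12345" starts from the release branch, while
    # "cherrypick/23.8/<merge-sha>" points at the original merge commit. The
    # `merge -s ours` below records the PR's first parent as already merged while
    # keeping the release tree, so a later merge of the cherry-pick branch brings
    # in only the PR's own change.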
    def create_cherrypick(self):
        # First, create the backport branch:
        # Checkout the release branch, discarding every local change
        git_runner(f"{GIT_PREFIX} checkout -f {self.name}")
        # Create or reset the backport branch
        git_runner(f"{GIT_PREFIX} checkout -B {self.backport_branch}")
        # Merge all changes from the PR's first parent commit w/o applying anything
        # It allows creating a merge commit as if it were a cherry-pick
        first_parent = git_runner(f"git rev-parse {self.pr.merge_commit_sha}^1")
        git_runner(f"{GIT_PREFIX} merge -s ours --no-edit {first_parent}")

        # Second step, create the cherry-pick branch
        git_runner(
            f"{GIT_PREFIX} branch -f "
            f"{self.cherrypick_branch} {self.pr.merge_commit_sha}"
        )

        # Check if there are actually any changes between the branches. If not, no
        # other actions are required. It's possible when the changes are already
        # backported manually to the release branch
        try:
            output = git_runner(
                f"{GIT_PREFIX} merge --no-commit --no-ff {self.cherrypick_branch}"
            )
            # 'up-to-date', 'up to date', who knows what else (╯°v°)╯ ^┻━┻
            if output.startswith("Already up") and output.endswith("date."):
                # The changes are already in the release branch, we are done here
                logging.info(
                    "Release branch %s already contains changes from %s",
                    self.name,
                    self.pr.number,
                )
                self._backported = True
                return
        except CalledProcessError:
            # There are most probably conflicts, they'll be resolved in the PR
            git_runner(f"{GIT_PREFIX} reset --merge")
        else:
            # There are changes to apply, so continue
            git_runner(f"{GIT_PREFIX} reset --merge")

        # Push, create the cherry-pick PR, label and assign it
        for branch in [self.cherrypick_branch, self.backport_branch]:
            git_runner(f"{GIT_PREFIX} push -f {self.REMOTE} {branch}:{branch}")

        self.cherrypick_pr = self.repo.create_pull(
            title=f"Cherry pick #{self.pr.number} to {self.name}: {self.pr.title}",
            body=self.CHERRYPICK_DESCRIPTION.format(
                pr_number=self.pr.number,
                pr_url=self.pr.html_url,
                backport_created_label=self.backport_created_label,
                label_cherrypick=Labels.PR_CHERRYPICK,
            ),
            base=self.backport_branch,
            head=self.cherrypick_branch,
        )
        self.cherrypick_pr.add_to_labels(Labels.PR_CHERRYPICK)
        self.cherrypick_pr.add_to_labels(Labels.DO_NOT_TEST)
        if Labels.PR_CRITICAL_BUGFIX in [label.name for label in self.pr.labels]:
            self.cherrypick_pr.add_to_labels(Labels.PR_CRITICAL_BUGFIX)
        elif Labels.PR_BUGFIX in [label.name for label in self.pr.labels]:
            self.cherrypick_pr.add_to_labels(Labels.PR_BUGFIX)
        self._assign_new_pr(self.cherrypick_pr)
        # update the cherry-pick PR to get the state for PR.mergeable
        self.cherrypick_pr.update()

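    # `git reset --soft <merge-base>` keeps the working tree and index but moves HEAD
    # back to the merge-base, so the single commit created right after carries the
    # whole cherry-picked change as one squashed backport commit.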
    def create_backport(self):
        assert self.cherrypick_pr is not None
        # Checkout the backport branch from the remote and make all changes
        # apply like a single cherry-pick commit on top of the release branch
        logging.info("Creating backport for PR #%s", self.pr.number)
        git_runner(f"{GIT_PREFIX} checkout -f {self.backport_branch}")
        git_runner(f"{GIT_PREFIX} pull --ff-only {self.REMOTE} {self.backport_branch}")
        merge_base = git_runner(
            f"{GIT_PREFIX} merge-base "
            f"{self.REMOTE}/{self.name} {self.backport_branch}"
        )
        git_runner(f"{GIT_PREFIX} reset --soft {merge_base}")
        title = f"Backport #{self.pr.number} to {self.name}: {self.pr.title}"
        git_runner(f"{GIT_PREFIX} commit --allow-empty -F -", input=title)

        # Push with force, create the backport PR, label and assign it
        git_runner(
            f"{GIT_PREFIX} push -f {self.REMOTE} "
            f"{self.backport_branch}:{self.backport_branch}"
        )
        self.backport_pr = self.repo.create_pull(
            title=title,
            body=f"Original pull-request {self.pr.html_url}\n"
            f"Cherry-pick pull-request #{self.cherrypick_pr.number}\n\n"
            f"{self.BACKPORT_DESCRIPTION}",
            base=self.name,
            head=self.backport_branch,
        )
        self.backport_pr.add_to_labels(Labels.PR_BACKPORT)
        if Labels.PR_CRITICAL_BUGFIX in [label.name for label in self.pr.labels]:
            self.backport_pr.add_to_labels(Labels.PR_CRITICAL_BUGFIX)
        elif Labels.PR_BUGFIX in [label.name for label in self.pr.labels]:
            self.backport_pr.add_to_labels(Labels.PR_BUGFIX)
        self._assign_new_pr(self.backport_pr)

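    # 86400 seconds is one day: if the cherry-pick PR was updated within the last day
    # (including by a previous reminder comment), the assignees are not pinged again.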
    def ping_cherry_pick_assignees(self, dry_run: bool) -> None:
        assert self.cherrypick_pr is not None
        logging.info(
            "Checking if cherry-pick PR #%s needs to be pinged",
            self.cherrypick_pr.number,
        )
        # The `updated_at` is Optional[datetime]
        cherrypick_updated_ts = (
            self.cherrypick_pr.updated_at or datetime.now()
        ).timestamp()
        since_updated = int(datetime.now().timestamp() - cherrypick_updated_ts)
        since_updated_str = (
            f"{since_updated // 86400}d{since_updated // 3600 % 24}"
            f"h{since_updated // 60 % 60}m{since_updated % 60}s"
        )
        if since_updated < 86400:
            logging.info(
                "The cherry-pick PR was updated %s ago, waiting for the next run",
                since_updated_str,
            )
            return
        assignees = ", ".join(f"@{user.login}" for user in self.cherrypick_pr.assignees)
        comment_body = (
            f"Dear {assignees}, the PR has not been updated for {since_updated_str}. "
            "Please, either resolve the conflicts, or close it to finish "
            f"the backport process of #{self.pr.number}"
        )
        if dry_run:
            logging.info(
                "DRY RUN: would comment the cherry-pick PR #%s:\n%s",
                self.cherrypick_pr.number,
                comment_body,
            )
            return

        self.cherrypick_pr.create_issue_comment(comment_body)

    def _assign_new_pr(self, new_pr: PullRequest) -> None:
        """Assign `new_pr` to the author, merger and assignees of the original PR"""
        # It looks like there is some race when multiple .add_to_assignees are
        # executed, so we'll add all of them at once
        assignees = [self.pr.user, self.pr.merged_by]
        if self.pr.assignees:
            assignees.extend(self.pr.assignees)
        logging.info(
            "Assigning #%s to the author and assignees of the original PR: %s",
            new_pr.number,
            ", ".join(user.login for user in assignees),
        )
        new_pr.add_to_assignees(*assignees)

    @property
    def backported(self) -> bool:
        return self._backported

    def __repr__(self):
        return self.name


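# Backport drives the whole run: it discovers the open release branches, finds the PRs
# that still need backporting, and delegates the per-branch work to ReleaseBranch.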
class Backport:
    def __init__(
        self,
        gh: GitHub,
        repo: str,
        fetch_from: Optional[str],
        dry_run: bool,
    ):
        self.gh = gh
        self._repo_name = repo
        self._fetch_from = fetch_from
        self.dry_run = dry_run

        self.must_create_backport_labels = (
            [Labels.MUST_BACKPORT]
            if self._repo_name == self._fetch_from
            else [Labels.MUST_BACKPORT_CLOUD, Labels.MUST_BACKPORT]
        )
        self.backport_created_label = (
            Labels.PR_BACKPORTS_CREATED
            if self._repo_name == self._fetch_from
            else Labels.PR_BACKPORTS_CREATED_CLOUD
        )

        self._remote = ""
        self._remote_line = ""

        self._repo = None  # type: Optional[Repository]
        self.release_prs = []  # type: PullRequests
        self.release_branches = []  # type: List[str]
        self.labels_to_backport = []  # type: List[str]
        self.prs_for_backport = []  # type: PullRequests
        self.error = None  # type: Optional[Exception]

    @property
    def remote_line(self) -> str:
        if not self._remote_line:
            # lines of "origin  git@github.com:ClickHouse/ClickHouse.git (fetch)"
            remotes = git_runner("git remote -v").split("\n")
            # We need the first word from the first matching result
            self._remote_line = next(
                iter(
                    remote
                    for remote in remotes
                    if f"github.com/{self._repo_name}" in remote  # https
                    or f"github.com:{self._repo_name}" in remote  # ssh
                )
            )

        return self._remote_line

    @property
    def remote(self) -> str:
        if not self._remote:
            self._remote = self.remote_line.split(maxsplit=1)[0]
            git_runner(f"git fetch {self._remote}")
            ReleaseBranch.REMOTE = self._remote
        return self._remote

    @property
    def is_remote_ssh(self) -> bool:
        return "github.com:" in self.remote_line

    def receive_release_prs(self):
        logging.info("Getting release PRs")
        self.release_prs = self.gh.get_release_pulls(self._repo_name)
        self.release_branches = [pr.head.ref for pr in self.release_prs]

        if not self._fetch_from:
            self.labels_to_backport = [
                f"v{branch}-must-backport" for branch in self.release_branches
            ]
        else:
            fetch_release_prs = self.gh.get_release_pulls(self._fetch_from)
            fetch_release_branches = [pr.head.ref for pr in fetch_release_prs]
            self.labels_to_backport = [
                f"v{branch}-must-backport" for branch in fetch_release_branches
            ]

            logging.info("Fetching from %s", self._fetch_from)
            fetch_from_repo = self.gh.get_repo(self._fetch_from)
            git_runner(
                "git fetch "
                f"{fetch_from_repo.ssh_url if self.is_remote_ssh else fetch_from_repo.clone_url} "
                f"{fetch_from_repo.default_branch} --no-tags"
            )

        logging.info("Active releases: %s", ", ".join(self.release_branches))

    def update_local_release_branches(self):
        logging.info("Updating local release branches")
        branches = git_runner("git branch").split()
        for branch in self.release_branches:
            if branch not in branches:
                # the local branch does not exist, so continue
                continue
            local_ref = git_runner(f"git rev-parse {branch}")
            remote_ref = git_runner(f"git rev-parse {self.remote}/{branch}")
            if local_ref == remote_ref:
                # No need to update, continue
                continue
            logging.info("Resetting %s to %s/%s", branch, self.remote, branch)
            git_runner(f"git branch -f {branch} {self.remote}/{branch}")

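    # The resulting GitHub search is presumably along the lines of
    #   type:pr repo:<owner/name> -label:<backport_created_label>
    #   label:v24.3-must-backport,... merged:<since_date>..<tomorrow>
    # (the v24.3 label is illustrative); github_helper builds the final query string
    # from the query_args dict below.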
    def receive_prs_for_backport(self, reserve_search_days: int) -> None:
        # The commits in the oldest open release branch
        oldest_branch_commits = git_runner(
            "git log --no-merges --format=%H --reverse "
            f"{self.remote}/{self.default_branch}..{self.remote}/{self.release_branches[0]}"
        )
        # The first commit is the one we are looking for
        since_commit = oldest_branch_commits.split("\n", 1)[0]
        since_date = date.fromisoformat(
            git_runner.run(f"git log -1 --format=format:%cs {since_commit}")
        ) - timedelta(days=reserve_search_days)
        # To avoid possible TZ issues
        tomorrow = date.today() + timedelta(days=1)
        logging.info("Receiving PRs supposed to be backported")

        query_args = {
            "query": f"type:pr repo:{self._fetch_from} -label:{self.backport_created_label}",
            "label": ",".join(
                self.labels_to_backport + self.must_create_backport_labels
            ),
            "merged": [since_date, tomorrow],
        }
        logging.info("Query to find the backport PRs:\n %s", query_args)
        self.prs_for_backport = self.gh.get_pulls_from_search(**query_args)
        logging.info(
            "PRs to be backported:\n %s",
            "\n ".join([pr.html_url for pr in self.prs_for_backport]),
        )

    def process_backports(self):
        for pr in self.prs_for_backport:
            try:
                self.process_pr(pr)
            except Exception as e:
                logging.error(
                    "An error occurred while processing PR #%s: %s", pr.number, e
                )
                self.error = e

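    # For every PR to backport: instantiate a ReleaseBranch per open release branch,
    # attribute the already existing cherry-pick/backport PRs to them, let each branch
    # finish its part, and only then mark the original PR as processed.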
    def process_pr(self, pr: PullRequest) -> None:
        pr_labels = [label.name for label in pr.labels]

        # FIXME: currently backport to all branches, for branch-specified backports too
        # Handle different branch name formats in cloud
        # if self.must_create_backport_label in pr_labels:
        branches = [
            ReleaseBranch(br, pr, self.repo, self.backport_created_label)
            for br in self.release_branches
        ]  # type: List[ReleaseBranch]
        # else:
        #     branches = [
        #         ReleaseBranch(br, pr, self.repo, self.backport_created_label)
        #         for br in [
        #             label.split("-", 1)[0][1:]  # v21.8-must-backport
        #             for label in pr_labels
        #             if label in self.labels_to_backport
        #         ]
        #     ]
        assert branches, "BUG!"

        logging.info(
            "  PR #%s is supposed to be backported to %s",
            pr.number,
            ", ".join(map(str, branches)),
        )
        # All PRs for cherry-pick and backport branches as heads
        query_suffix = " ".join(
            [
                f"head:{branch.backport_branch} head:{branch.cherrypick_branch}"
                for branch in branches
            ]
        )
        bp_cp_prs = self.gh.get_pulls_from_search(
            query=f"type:pr repo:{self._repo_name} {query_suffix}",
            label=f"{Labels.PR_BACKPORT},{Labels.PR_CHERRYPICK}",
        )
        for br in branches:
            bp_cp_prs = br.pop_prs(bp_cp_prs)
        assert not bp_cp_prs, "BUG!"

        for br in branches:
            br.process(self.dry_run)

        for br in branches:
            assert br.backported, f"BUG! backport to branch [{br}] failed"
        self.mark_pr_backported(pr)

    def mark_pr_backported(self, pr: PullRequest) -> None:
        if self.dry_run:
            logging.info("DRY RUN: would mark PR #%s as done", pr.number)
            return
        pr.add_to_labels(self.backport_created_label)
        logging.info(
            "PR #%s is successfully labeled with `%s`",
            pr.number,
            self.backport_created_label,
        )

    @property
    def repo(self) -> Repository:
        if self._repo is None:
            self._repo = self.gh.get_repo(self._repo_name)
        return self._repo

    @property
    def default_branch(self) -> str:
        return self.repo.default_branch


def parse_args():
    parser = argparse.ArgumentParser("Create cherry-pick and backport PRs")
    parser.add_argument("--token", help="github token, if not set, taken from ssm")
    parser.add_argument(
        "--repo", default="ClickHouse/ClickHouse", help="repo owner/name"
    )
    parser.add_argument(
        "--from-repo",
        default="ClickHouse/ClickHouse",
        help="if set, the commits will be taken from this repo, but PRs will be created in the main repo",
    )
    parser.add_argument("--dry-run", action="store_true", help="do not create anything")

    parser.add_argument(
        "--reserve-search-days",
        default=0,
        type=int,
        help="safety reserve of days for the PR search, necessary for cloud",
    )

    parser.add_argument(
        "--debug-helpers",
        action="store_true",
        help="add debug logging for git_helper and github_helper",
    )
    return parser.parse_args()


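# A typical dry-run invocation (a sketch; assuming this script is saved as
# cherry_pick.py and the tests/ci helpers imported above are available):
#
#   python3 cherry_pick.py --repo ClickHouse/ClickHouse --dry-run --debug-helpers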
@contextmanager
def clear_repo():
    def ref():
        return git_runner("git branch --show-current") or git_runner(
            "git rev-parse HEAD"
        )

    orig_ref = ref()
    try:
        yield
    finally:
        current_ref = ref()
        if orig_ref != current_ref:
            git_runner(f"git checkout -f {orig_ref}")


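# stash() protects uncommitted local changes around the run (including the nested
# clear_repo(), which restores the originally checked-out ref on exit).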
@contextmanager
def stash():
    # diff.ignoreSubmodules=all doesn't show changed submodules
    need_stash = bool(git_runner("git -c diff.ignoreSubmodules=all diff HEAD"))
    if need_stash:
        script = (
            __main__.__file__ if hasattr(__main__, "__file__") else "unknown script"
        )
        git_runner(f"git stash push --no-keep-index -m 'running {script}'")
    try:
        with clear_repo():
            yield
    finally:
        if need_stash:
            git_runner("git stash pop")


def main():
    temp_path = Path(TEMP_PATH)
    temp_path.mkdir(parents=True, exist_ok=True)

    args = parse_args()
    if args.debug_helpers:
        logging.getLogger("github_helper").setLevel(logging.DEBUG)
        logging.getLogger("git_helper").setLevel(logging.DEBUG)
    token = args.token or get_best_robot_token()

    gh = GitHub(token, create_cache_dir=False)
    bp = Backport(
        gh,
        args.repo,
        args.from_repo,
        args.dry_run,
    )
    # https://github.com/python/mypy/issues/3004
    bp.gh.cache_path = temp_path / "gh_cache"
    bp.receive_release_prs()
    bp.update_local_release_branches()
    bp.receive_prs_for_backport(args.reserve_search_days)
    bp.process_backports()
    if bp.error is not None:
        logging.error("Finished, but some errors occurred!")
        raise bp.error


if __name__ == "__main__":
    logging.getLogger().setLevel(level=logging.INFO)

    assert not is_shallow()
    with stash():
        if os.getenv("ROBOT_CLICKHOUSE_SSH_KEY", ""):
            with SSHKey("ROBOT_CLICKHOUSE_SSH_KEY"):
                main()
        else:
            main()