2020-06-29 12:13:19 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2020-12-21 17:44:09 +00:00
|
|
|
try:
    # Normal case: running from inside the ClickHouse repository, where the
    # github helpers are importable as an absolute package.
    from clickhouse.utils.github.cherrypick import CherryPick
    from clickhouse.utils.github.query import Query as RemoteRepo
    from clickhouse.utils.github.local import Repository as LocalRepo
except ImportError:
    # Fallback: running as part of the local package (e.g. copied elsewhere).
    # Catch only ImportError — the original bare `except:` would also have
    # swallowed unrelated errors raised while importing those modules.
    from .cherrypick import CherryPick
    from .query import Query as RemoteRepo
    from .local import Repository as LocalRepo
|
2020-06-29 12:13:19 +00:00
|
|
|
|
|
|
|
import argparse
|
|
|
|
import logging
|
|
|
|
import re
|
|
|
|
import sys
|
|
|
|
|
|
|
|
|
|
|
|
class Backport:
    """Find merged PRs labelled for backporting and cherry-pick them onto the
    active release branches of a GitHub repository.

    Heavy lifting is delegated to RemoteRepo (GitHub GraphQL queries) and
    LocalRepo (local git operations); this class only orchestrates them.
    """

    def __init__(self, token, owner, name, team):
        # min/max page sizes tune the GraphQL pagination of the remote queries.
        self._gh = RemoteRepo(
            token, owner=owner, name=name, team=team, max_page_size=30, min_page_size=7
        )
        self._token = token
        self.default_branch_name = self._gh.default_branch
        self.ssh_url = self._gh.ssh_url

    def getPullRequests(self, from_commit):
        """Return merged pull-requests reachable from `from_commit` (delegates
        to the remote repo; see RemoteRepo.get_pull_requests)."""
        return self._gh.get_pull_requests(from_commit)

    def getBranchesWithRelease(self):
        """Return the set of head branch names of PRs labelled 'release'."""
        return {
            pull_request["headRefName"]
            for pull_request in self._gh.find_pull_requests("release")
        }

    def execute(self, repo, upstream, until_commit, run_cherrypick):
        """Compute which PRs must be backported to which release branches and
        invoke `run_cherrypick(token, pr_number, branch_name)` for each pair.

        :param repo: path to the local repository checkout
        :param upstream: remote name of upstream in that checkout
        :param until_commit: stop scanning PRs at this commit; if falsy, the
            base commit of the most recent release branch is used
        :param run_cherrypick: callable performing one cherry-pick; its return
            value is logged as the status
        """
        repo = LocalRepo(repo, upstream, self.default_branch_name)
        all_branches = repo.get_release_branches()  # [(branch_name, base_commit)]

        release_branches = self.getBranchesWithRelease()

        # Keep only branches that have a 'release' PR, iterating over
        # all_branches to preserve their precedence (most recent last).
        branches = []
        for branch in all_branches:
            if branch[0] in release_branches:
                branches.append(branch)

        if not branches:
            logging.info("No release branches found!")
            return

        for branch in branches:
            logging.info("Found release branch: %s", branch[0])

        if not until_commit:
            until_commit = branches[0][1]
        pull_requests = self.getPullRequests(until_commit)

        # pr number -> set of branch names / version strings to backport to
        backport_map = {}

        RE_MUST_BACKPORT = re.compile(r"^v(\d+\.\d+)-must-backport$")
        RE_NO_BACKPORT = re.compile(r"^v(\d+\.\d+)-no-backport$")
        RE_BACKPORTED = re.compile(r"^v(\d+\.\d+)-backported$")

        # pull-requests are sorted by ancestry from the most recent.
        for pr in pull_requests:
            # Drop release branches whose base commit is already an ancestor
            # of (or equal to) this PR's merge commit: the PR is contained in
            # them, and — since PRs are ordered — in all later-checked PRs too.
            while repo.comparator(branches[-1][1]) >= repo.comparator(
                pr["mergeCommit"]["oid"]
            ):
                logging.info(
                    "PR #%s is already inside %s. Dropping this branch for further PRs",
                    pr["number"],
                    branches[-1][0],
                )
                branches.pop()

            logging.info("Processing PR #%s", pr["number"])

            # Invariant: at least one release branch must remain.
            assert len(branches)

            branch_set = {branch[0] for branch in branches}

            # First pass. Find all must-backports
            for label in pr["labels"]["nodes"]:
                if label["name"] == "pr-must-backport":
                    # Backport to every remaining release branch.
                    backport_map[pr["number"]] = branch_set.copy()
                    continue
                matched = RE_MUST_BACKPORT.match(label["name"])
                if matched:
                    # Version-specific must-backport: add just that version.
                    if pr["number"] not in backport_map:
                        backport_map[pr["number"]] = set()
                    backport_map[pr["number"]].add(matched.group(1))

            # Second pass. Find all no-backports
            for label in pr["labels"]["nodes"]:
                if label["name"] == "pr-no-backport" and pr["number"] in backport_map:
                    # Blanket opt-out overrides everything found above.
                    del backport_map[pr["number"]]
                    break
                matched_no_backport = RE_NO_BACKPORT.match(label["name"])
                matched_backported = RE_BACKPORTED.match(label["name"])
                if (
                    matched_no_backport
                    and pr["number"] in backport_map
                    and matched_no_backport.group(1) in backport_map[pr["number"]]
                ):
                    backport_map[pr["number"]].remove(matched_no_backport.group(1))
                    logging.info(
                        "\tskipping %s because of forced no-backport",
                        matched_no_backport.group(1),
                    )
                elif (
                    matched_backported
                    and pr["number"] in backport_map
                    and matched_backported.group(1) in backport_map[pr["number"]]
                ):
                    backport_map[pr["number"]].remove(matched_backported.group(1))
                    logging.info(
                        "\tskipping %s because it's already backported manually",
                        matched_backported.group(1),
                    )

        # Distinct loop variable names here: the original reused `pr` and
        # `branches`, clobbering the list built above.
        for pr_number, target_branches in backport_map.items():
            logging.info("PR #%s needs to be backported to:", pr_number)
            for target_branch in target_branches:
                logging.info(
                    "\t%s, and the status is: %s",
                    target_branch,
                    run_cherrypick(self._token, pr_number, target_branch),
                )

        # print API costs
        logging.info("\nGitHub API total costs per query:")
        for name, value in self._gh.api_costs.items():
            logging.info("%s : %s", name, value)
|
2020-06-29 12:13:19 +00:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--token", type=str, required=True, help="token for Github access"
    )
    parser.add_argument(
        "--repo",
        type=str,
        required=True,
        help="path to full repository",
        metavar="PATH",
    )
    parser.add_argument(
        "--til", type=str, help="check PRs from HEAD til this commit", metavar="COMMIT"
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="do not create or merge any PRs",
        default=False,
    )
    parser.add_argument(
        "--verbose",
        "-v",
        action="store_true",
        help="more verbose output",
        default=False,
    )
    parser.add_argument(
        "--upstream",
        "-u",
        type=str,
        help="remote name of upstream in repository",
        default="origin",
    )
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(
            format="%(message)s", stream=sys.stdout, level=logging.DEBUG
        )
    else:
        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.INFO)

    # Plain function instead of a named lambda (PEP 8 E731); behavior is
    # identical. Closes over `args` for the local repo path and dry-run flag.
    def cherrypick_run(token, pr, branch):
        """Cherry-pick one PR onto one release branch; returns its status."""
        return CherryPick(
            token, "ClickHouse", "ClickHouse", "core", pr, branch
        ).execute(args.repo, args.dry_run)

    bp = Backport(args.token, "ClickHouse", "ClickHouse", "core")
    bp.execute(args.repo, args.upstream, args.til, cherrypick_run)
|