Detect backports by PRs to stable branches with pr-backport label

Also add some fancy colors.
commit 31a566c4f2 (parent 28f1d417f7)
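In short, the new check treats a merged pull-request as backported to a stable branch when a cross-referencing PR into that branch carries the pr-backport label. A minimal sketch of that rule (the event shape mirrors the CrossReferencedEvent nodes queried below; the helper name is illustrative):

# Sketch of the detection rule (helper name is illustrative): a
# cross-referenced PR counts as a backport of `pull_request` into a stable
# branch if it targets that branch and carries the 'pr-backport' label.
def is_backport(event, pull_request, stable_branches):
    source = event['source']  # the referencing pull-request
    return (not event['isCrossRepository']
            and event['target']['number'] == pull_request['number']
            and source['baseRefName'] in stable_branches
            and any(label['name'] == 'pr-backport'
                    for label in source['labels']['nodes']))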
@@ -3,7 +3,7 @@
 '''
 Rules for commit messages, branch names and everything:
 
-- All(!) commits to master branch must originate from pull-requests.
+- All important(!) commits to master branch must originate from pull-requests.
 - All pull-requests must be squash-merged or explicitly merged without rebase.
 - All pull-requests to master must have at least one label prefixed with `pr-`.
 - Labels that require pull-request to be backported must be red colored (#ff0000).
@@ -15,19 +15,22 @@
 
 - Commits without references from pull-requests.
 - Pull-requests to master without proper labels.
-- Pull-requests that need to be backported.
+- Pull-requests that need to be backported, with statuses per stable branch.
 
 '''
 
 from . import local, query
 
+from termcolor import colored # `pip install termcolor`
+
 import argparse
 import re
 import sys
 
-CHECK_MARK = '🗸'
-CROSS_MARK = '🗙'
-LABEL_MARK = '🏷'
+CHECK_MARK = colored('🗸', 'green')
+CROSS_MARK = colored('🗙', 'red')
+LABEL_MARK = colored('🏷', 'yellow')
+CLOCK_MARK = colored('↻', 'cyan')
 
 
 parser = argparse.ArgumentParser(description='Helper for the ClickHouse Release machinery')
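For reference, termcolor's colored() only wraps the text in ANSI escape sequences, so the marks above render in color on any ANSI-capable terminal; a quick check, assuming termcolor is installed:

from termcolor import colored  # pip install termcolor

# colored() returns the string wrapped in ANSI codes, e.g. '\x1b[32m🗸\x1b[0m'
print(colored('🗸', 'green'), 'backport merged')
print(colored('↻', 'cyan'), 'backport waiting to merge')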
@@ -39,7 +42,7 @@ parser.add_argument('-n', type=int, default=3, dest='number',
     help='number of last stable branches to consider')
 parser.add_argument('--token', type=str, required=True,
     help='token for Github access')
-parser.add_argument('--login', type = str,
+parser.add_argument('--login', type=str,
     help='filter authorship by login')
 
 args = parser.parse_args()
@@ -47,7 +50,7 @@ args = parser.parse_args()
 github = query.Query(args.token)
 repo = local.Local(args.repo, args.remote, github.get_default_branch())
 
-stables = repo.get_stables()[-args.number:] # [(branch, base)]
+stables = repo.get_stables()[-args.number:] # [(branch name, base)]
 if not stables:
     sys.exit('No stable branches found!')
 else:
@@ -68,6 +71,15 @@ for i in reversed(range(len(stables))):
 
     from_commit = stables[i][1]
 
+members = set(github.get_members("yandex", "clickhouse"))
+def print_responsible(pull_request):
+    if pull_request["author"]["login"] in members:
+        return colored(pull_request["author"]["login"], 'green')
+    elif pull_request["mergedBy"]["login"] in members:
+        return f'{pull_request["author"]["login"]} → {colored(pull_request["mergedBy"]["login"], "green")}'
+    else:
+        return f'{pull_request["author"]["login"]} → {pull_request["mergedBy"]["login"]}'
+
 bad_pull_requests = [] # collect and print if not empty
 need_backporting = []
 for pull_request in pull_requests:
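print_responsible() credits the author when they belong to the team, and otherwise shows "author → merger" with the team member highlighted. A hypothetical call (the dict shape matches the author/mergedBy fields added to the query below):

# Hypothetical input, shaped like the author/mergedBy fields fetched below.
pr = {'author': {'login': 'external-contributor'},
      'mergedBy': {'login': 'team-member'}}
# With 'team-member' in `members`, prints: external-contributor → team-member
# (the merger's login rendered in green).
print(print_responsible(pr))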
@@ -86,10 +98,10 @@ for pull_request in pull_requests:
 if bad_pull_requests:
     print('\nPull-requests without description label:')
     for bad in reversed(sorted(bad_pull_requests, key = lambda x : x['number'])):
-        print(f'{CROSS_MARK} {bad["number"]}: {bad["url"]}')
+        print(f'{CROSS_MARK} {bad["number"]}: {bad["url"]} ({print_responsible(bad)})')
 
 # FIXME: compatibility logic, until the direct modification of master is not prohibited.
-if bad_commits:
+if bad_commits and not args.login:
     print('\nCommits not referenced by any pull-request:')
 
     for bad in bad_commits:
@@ -98,40 +110,64 @@ if bad_commits:
 # TODO: check backports.
 if need_backporting:
     re_vlabel = re.compile(r'^v\d+\.\d+$')
-    re_stable_num = re.compile(r'\d+\.\d+$')
 
     print('\nPull-requests need to be backported:')
     for pull_request in reversed(sorted(need_backporting, key=lambda x: x['number'])):
         targets = [] # use common list for consistent order in output
         good = set()
+        labeled = set()
+        wait = set()
 
         for stable in stables:
             if repo.comparator(stable[1]) < repo.comparator(pull_request['mergeCommit']['oid']):
-                targets.append(stable)
+                targets.append(stable[0])
 
                 # FIXME: compatibility logic - check for a manually set label, that indicates status 'backported'.
                 # FIXME: O(n²) - no need to iterate all labels for every `stable`
                 for label in github.get_labels(pull_request):
                     if re_vlabel.match(label['name']):
-                        stable_num = re_stable_num.search(stable[0].name)
-                        if f'v{stable_num[0]}' == label['name']:
-                            good.add(stable)
+                        if f'v{stable[0]}' == label['name']:
+                            labeled.add(stable[0])
 
+        for event in github.get_timeline(pull_request):
+            if(event['isCrossRepository'] or
+               event['target']['number'] != pull_request['number'] or
+               event['source']['baseRefName'] not in targets):
+                continue
+
+            found_label = False
+            for label in github.get_labels(event['source']):
+                if label['name'] == 'pr-backport':
+                    found_label = True
+                    break
+            if not found_label:
+                continue
+
+            if event['source']['merged']:
+                good.add(event['source']['baseRefName'])
+            else:
+                wait.add(event['source']['baseRefName'])
+
         # print pull-request's status
-        if len(good) == len(targets):
+        if len(good) + len(labeled) == len(targets):
             print(f'{CHECK_MARK}', end=' ')
         else:
             print(f'{CROSS_MARK}', end=' ')
         print(f'{pull_request["number"]}', end=':')
         for target in targets:
             if target in good:
-                print(f'\t{LABEL_MARK} {target[0]}', end='')
+                print(f'\t{CHECK_MARK} {target}', end='')
+            elif target in labeled:
+                print(f'\t{LABEL_MARK} {target}', end='')
+            elif target in wait:
+                print(f'\t{CLOCK_MARK} {target}', end='')
             else:
-                print(f'\t{CROSS_MARK} {target[0]}', end='')
-        print(f'\t({pull_request["mergeCommit"]["author"]["name"]}) {pull_request["url"]}')
+                print(f'\t{CROSS_MARK} {target}', end='')
+        print(f'\t{pull_request["url"]} ({print_responsible(pull_request)})')
 
 # print legend
 print('\nLegend:')
 print(f'{CHECK_MARK} - good')
 print(f'{CROSS_MARK} - bad')
 print(f'{LABEL_MARK} - backport is detected via label')
+print(f'{CLOCK_MARK} - backport is waiting to merge')
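Each target branch now resolves to one of four marks: check (backport merged), label (manually marked as backported), clock (backport PR still open) or cross (nothing found), and the leading mark is green only when every target is covered by good or labeled. A worked example of that classification with made-up branch names:

# Made-up example of the per-target classification above.
targets = ['19.1', '19.3', '19.4']
good, labeled, wait = {'19.1'}, {'19.3'}, set()

assert len(good) + len(labeled) == 2 != len(targets)  # -> leading cross mark
for target in targets:
    mark = ('🗸' if target in good else
            '🏷' if target in labeled else
            '↻' if target in wait else '🗙')
    print(f'\t{mark} {target}', end='')
print()  # prints: 🗸 19.1  🏷 19.3  🗙 19.4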
@@ -52,6 +52,6 @@ class Local:
             elif len(base) > 1:
                 print(f'Branch {stable.path} has more than one base commit. Ignoring.')
             else:
-                stables.append((stable, base[0]))
+                stables.append((os.path.basename(stable.name), base[0]))
 
         return sorted(stables, key=lambda x : self.comparator(x[1]))
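Storing os.path.basename(stable.name) makes the first tuple element a plain branch name rather than a ref object, which is what lets the main script compare it directly against version labels. Assuming stable.name is a ref path such as 'origin/19.1':

import os.path

# Assuming `stable.name` is a ref path like 'origin/19.1':
print(os.path.basename('origin/19.1'))        # -> 19.1
print(f"v{os.path.basename('origin/19.1')}")  # -> v19.1, comparable to labels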
@@ -7,9 +7,55 @@ class Query:
     '''Implements queries to the Github API using GraphQL
     '''
 
-    def __init__(self, token, max_page_size=100):
+    def __init__(self, token, max_page_size=100, min_page_size=5):
         self._token = token
         self._max_page_size = max_page_size
+        self._min_page_size = min_page_size
 
+    _MEMBERS = '''
+    {{
+        organization(login: "{organization}") {{
+            team(slug: "{team}") {{
+                members(first: {max_page_size} {next}) {{
+                    pageInfo {{
+                        hasNextPage
+                        endCursor
+                    }}
+                    nodes {{
+                        login
+                    }}
+                }}
+            }}
+        }}
+    }}
+    '''
+    def get_members(self, organization, team):
+        '''Get all team members for organization
+
+        Returns:
+            logins: a list of members' logins
+        '''
+        logins = []
+        not_end = True
+        query = Query._MEMBERS.format(organization=organization,
+                                      team=team,
+                                      max_page_size=self._max_page_size,
+                                      next='')
+
+        while not_end:
+            result = self._run(query)['organization']['team']
+            if result is None:
+                break
+            result = result['members']
+            not_end = result['pageInfo']['hasNextPage']
+            query = Query._MEMBERS.format(organization=organization,
+                                          team=team,
+                                          max_page_size=self._max_page_size,
+                                          next=f'after: "{result["pageInfo"]["endCursor"]}"')
+
+            logins += [node['login'] for node in result['nodes']]
+
+        return logins
+
     _LABELS = '''
     {{
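get_members() follows the same cursor pagination used throughout this class: fetch a page, read pageInfo.hasNextPage and endCursor, then re-issue the query with an after: clause. The recurring loop could be distilled into a generic helper, sketched here (hypothetical, not part of this change):

# Hypothetical helper distilling the pagination loop shared by get_members(),
# get_labels() and get_timeline(); `fetch(next_clause)` must return the JSON
# object that holds `pageInfo` and `nodes`.
def paginate(fetch):
    nodes = []
    next_clause = ''
    while True:
        page = fetch(next_clause)
        nodes += page['nodes']
        if not page['pageInfo']['hasNextPage']:
            return nodes
        next_clause = f'after: "{page["pageInfo"]["endCursor"]}"'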
@@ -39,20 +85,89 @@ class Query:
             labels: a list of JSON nodes with the name and color fields
         '''
         labels = [label for label in pull_request['labels']['nodes']]
-        not_end = bool(pull_request['labels']['pageInfo']['hasNextPage'])
-        query = Query._LABELS.format(number=pull_request['number'], max_page_size=self._max_page_size, next=f'after: "{pull_request["labels"]["pageInfo"]["endCursor"]}"')
+        not_end = pull_request['labels']['pageInfo']['hasNextPage']
+        query = Query._LABELS.format(number = pull_request['number'],
+                                     max_page_size = self._max_page_size,
+                                     next=f'after: "{pull_request["labels"]["pageInfo"]["endCursor"]}"')
 
         while not_end:
-            result = self._run(query)['data']['repository']['pullRequest']['labels']
+            result = self._run(query)['repository']['pullRequest']['labels']
             not_end = result['pageInfo']['hasNextPage']
+            query = Query._LABELS.format(number=pull_request['number'],
+                                         max_page_size=self._max_page_size,
+                                         next=f'after: "{result["pageInfo"]["endCursor"]}"')
 
             labels += [label for label in result['nodes']]
 
-            query = Query._LABELS.format(number=pull_request['number'], max_page_size=self._max_page_size, next=f'after: "{result["pageInfo"]["endCursor"]}"')
-
         return labels
 
-    _MAX_PULL_REQUESTS = 5
+    _TIMELINE = '''
+    {{
+        repository(owner: "yandex" name: "ClickHouse") {{
+            pullRequest(number: {number}) {{
+                timeline(first: {max_page_size} {next}) {{
+                    pageInfo {{
+                        hasNextPage
+                        endCursor
+                    }}
+                    nodes {{
+                        ... on CrossReferencedEvent {{
+                            isCrossRepository
+                            source {{
+                                ... on PullRequest {{
+                                    number
+                                    baseRefName
+                                    merged
+                                    labels(first: {max_page_size}) {{
+                                        pageInfo {{
+                                            hasNextPage
+                                            endCursor
+                                        }}
+                                        nodes {{
+                                            name
+                                            color
+                                        }}
+                                    }}
+                                }}
+                            }}
+                            target {{
+                                ... on PullRequest {{
+                                    number
+                                }}
+                            }}
+                        }}
+                    }}
+                }}
+            }}
+        }}
+    }}
+    '''
+    def get_timeline(self, pull_request):
+        '''Fetches all cross-reference events from pull-request's timeline
+
+        Args:
+            pull_request: JSON object returned by `get_pull_requests()`
+
+        Returns:
+            events: a list of JSON nodes for CrossReferenceEvent
+        '''
+        events = [event for event in pull_request['timeline']['nodes'] if event and event['source']]
+        not_end = pull_request['timeline']['pageInfo']['hasNextPage']
+        query = Query._TIMELINE.format(number = pull_request['number'],
+                                       max_page_size = self._max_page_size,
+                                       next=f'after: "{pull_request["timeline"]["pageInfo"]["endCursor"]}"')
+
+        while not_end:
+            result = self._run(query)['repository']['pullRequest']['timeline']
+            not_end = result['pageInfo']['hasNextPage']
+            query = Query._TIMELINE.format(number=pull_request['number'],
+                                           max_page_size=self._max_page_size,
+                                           next=f'after: "{result["pageInfo"]["endCursor"]}"')
+
+            events += [event for event in result['nodes'] if event and event['source']]
+
+        return events
+
     _PULL_REQUESTS = '''
     {{
         repository(owner: "yandex" name: "ClickHouse") {{
@@ -67,11 +182,17 @@ class Query:
                     }}
                     nodes {{
                         oid
-                        associatedPullRequests(first: {max_pull_requests}) {{
+                        associatedPullRequests(first: {min_page_size}) {{
                             totalCount
                             nodes {{
                                 ... on PullRequest {{
                                     number
+                                    author {{
+                                        login
+                                    }}
+                                    mergedBy {{
+                                        login
+                                    }}
                                     url
                                     baseRefName
                                     baseRepository {{
@@ -79,14 +200,8 @@ class Query:
                                     }}
                                     mergeCommit {{
                                         oid
-                                        author {{
-                                            user {{
-                                                id
-                                            }}
-                                            name
-                                        }}
                                     }}
-                                    labels(first: {max_page_size}) {{
+                                    labels(first: {min_page_size}) {{
                                         pageInfo {{
                                             hasNextPage
                                             endCursor
@@ -96,6 +211,34 @@ class Query:
                                             color
                                         }}
                                     }}
+                                    timeline(first: {min_page_size}) {{
+                                        pageInfo {{
+                                            hasNextPage
+                                            endCursor
+                                        }}
+                                        nodes {{
+                                            ... on CrossReferencedEvent {{
+                                                isCrossRepository
+                                                source {{
+                                                    ... on PullRequest {{
+                                                        number
+                                                        baseRefName
+                                                        merged
+                                                        labels(first: 0) {{
+                                                            nodes {{
+                                                                name
+                                                            }}
+                                                        }}
+                                                    }}
+                                                }}
+                                                target {{
+                                                    ... on PullRequest {{
+                                                        number
+                                                    }}
+                                                }}
+                                            }}
+                                        }}
+                                    }}
                                 }}
                             }}
                         }}
@@ -107,26 +250,30 @@ class Query:
                 }}
             }}
         }}
     '''
-    def get_pull_requests(self, before_commit, author):
+    def get_pull_requests(self, before_commit, login):
         '''Get all merged pull-requests from the HEAD of default branch to the last commit (excluding)
 
         Args:
             before_commit (string-convertible): commit sha of the last commit (excluding)
-            author (string): filter pull-requests by author name
+            login (string): filter pull-requests by user login
 
         Returns:
             pull_requests: a list of JSON nodes with pull-requests' details
         '''
         pull_requests = []
-        query = Query._PULL_REQUESTS.format(max_page_size=self._max_page_size, max_pull_requests=Query._MAX_PULL_REQUESTS, next='')
         not_end = True
-        user_id = self.get_user(author) if author else None
+        query = Query._PULL_REQUESTS.format(max_page_size=self._max_page_size,
+                                            min_page_size=self._min_page_size,
+                                            next='')
 
         while not_end:
-            result = self._run(query)['data']['repository']['defaultBranchRef']
+            result = self._run(query)['repository']['defaultBranchRef']
             default_branch_name = result['name']
             result = result['target']['history']
             not_end = result['pageInfo']['hasNextPage']
+            query = Query._PULL_REQUESTS.format(max_page_size=self._max_page_size,
+                                                min_page_size=self._min_page_size,
+                                                next=f'after: "{result["pageInfo"]["endCursor"]}"')
 
             for commit in result['nodes']:
                 if str(commit['oid']) == str(before_commit):
@@ -134,18 +281,16 @@ class Query:
                     break
 
                 # TODO: fetch all pull-requests that were merged in a single commit.
-                assert commit['associatedPullRequests']['totalCount'] <= Query._MAX_PULL_REQUESTS, \
+                assert commit['associatedPullRequests']['totalCount'] <= self._min_page_size, \
                     f'there are {commit["associatedPullRequests"]["totalCount"]} pull-requests merged in commit {commit["oid"]}'
 
                 for pull_request in commit['associatedPullRequests']['nodes']:
                     if(pull_request['baseRepository']['nameWithOwner'] == 'yandex/ClickHouse' and
                        pull_request['baseRefName'] == default_branch_name and
                        pull_request['mergeCommit']['oid'] == commit['oid'] and
-                       (not user_id or pull_request['mergeCommit']['author']['user']['id'] == user_id)):
+                       (not login or pull_request['author']['login'] == login)):
                         pull_requests.append(pull_request)
 
-            query = Query._PULL_REQUESTS.format(max_page_size=self._max_page_size, max_pull_requests=Query._MAX_PULL_REQUESTS, next=f'after: "{result["pageInfo"]["endCursor"]}"')
-
         return pull_requests
 
     _DEFAULT = '''
@@ -163,19 +308,7 @@ class Query:
         Returns:
             name (string): branch name
         '''
-        return self._run(Query._DEFAULT)['data']['repository']['defaultBranchRef']['name']
-
-    _USER = '''
-    {{
-        user(login: "{login}") {{
-            id
-        }}
-    }}
-    '''
-    def get_user(self, login):
-        '''Returns id by user login
-        '''
-        return self._run(Query._USER.format(login=login))['data']['user']['id']
+        return self._run(Query._DEFAULT)['repository']['defaultBranchRef']['name']
 
     def _run(self, query):
         from requests.adapters import HTTPAdapter
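Dropping _USER/get_user() works because the author login now arrives with every pull-request node, so the filter compares logins directly instead of first resolving a login to a user id; roughly:

# Sketch (hypothetical helper): the author login ships with each PR node,
# so no extra _USER lookup is needed to filter by user.
def matches_login(pull_request, login):
    return not login or pull_request['author']['login'] == login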
@@ -203,6 +336,10 @@ class Query:
         headers = {'Authorization': f'bearer {self._token}'}
         request = requests_retry_session().post('https://api.github.com/graphql', json={'query': query}, headers=headers)
         if request.status_code == 200:
-            return request.json()
+            result = request.json()
+            if 'errors' in result:
+                raise Exception(f'Errors occurred: {result["errors"]}')
+            return result['data']
         else:
-            raise Exception(f'Query failed with code {request.status_code}: {query}')
+            import json
+            raise Exception(f'Query failed with code {request.status_code}:\n{json.dumps(request.json(), indent=4)}')
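The GitHub GraphQL endpoint can answer 200 OK and still report failures in an errors array, which is why _run() now inspects the body instead of trusting the status code. A response might look like this sketch (the errors key is defined by the GraphQL spec; the payload is invented):

# A "successful" HTTP response that still failed at the GraphQL level:
response = {
    'data': None,
    'errors': [{'message': "Field 'pullRequest' doesn't exist on type 'Repository'"}],
}
if 'errors' in response:
    raise Exception(f'Errors occurred: {response["errors"]}')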
@@ -1,4 +1,6 @@
 #!/bin/sh
 
 set -e
-python3 -m github "$@"
+SCRIPTPATH=$(readlink -f "$0")
+SCRIPTDIR=$(dirname "$SCRIPTPATH")
+PYTHONPATH="$SCRIPTDIR" python3 -m github "$@"