2019-04-08 15:33:05 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
import requests
|
|
|
|
|
|
|
|
|
|
|
|
class Query:
    '''
    Implements queries to the Github API using GraphQL
    '''

    # Template of the GraphQL fields fetched for every pull request.
    # Doubled braces escape str.format(); the only placeholder is
    # {min_page_size}, which is substituted once in __init__ (the formatted
    # result is stored on the instance as self._PULL_REQUEST).
    _PULL_REQUEST = '''
        author {{
            ... on User {{
                id
                login
            }}
        }}

        baseRepository {{
            nameWithOwner
        }}

        mergeCommit {{
            oid
            parents(first: {min_page_size}) {{
                totalCount
                nodes {{
                    oid
                }}
            }}
        }}

        mergedBy {{
            ... on User {{
                id
                login
            }}
        }}

        baseRefName
        closed
        headRefName
        id
        mergeable
        merged
        number
        title
        url
    '''
|
|
|
|
|
|
|
|
    def __init__(self, token, owner, name, team, max_page_size=100, min_page_size=5):
        '''
        Args:
            token (string): GitHub API token with access to the repository
            owner (string): repository owner (user or organization login)
            name (string): repository name
            team (string): team slug within the owner organization; its
                members are collected into `self.members`
            max_page_size (int): page size for large paginated queries
                (the GitHub API maximum is 100)
            min_page_size (int): page size for small nested collections
                (merge-commit parents, associated PRs, labels)

        NOTE: the constructor performs network requests (repository metadata
        and team members are fetched eagerly).
        '''
        # Bake min_page_size into the pull-request field template once,
        # shadowing the class-level template for this instance.
        self._PULL_REQUEST = Query._PULL_REQUEST.format(min_page_size=min_page_size)

        self._token = token
        self._owner = owner
        self._name = name
        self._team = team

        self._max_page_size = max_page_size
        self._min_page_size = min_page_size

        # Per-caller accumulated GraphQL rateLimit costs; filled by _run().
        self.api_costs = {}

        repo = self.get_repository()
        self._id = repo['id']
        self.ssh_url = repo['sshUrl']
        self.default_branch = repo['defaultBranchRef']['name']

        # get_members() returns a login -> id map; keep only the logins.
        self.members = set(self.get_members())
|
|
|
|
|
|
|
|
def get_repository(self):
|
|
|
|
_QUERY = '''
|
|
|
|
repository(owner: "{owner}" name: "{name}") {{
|
|
|
|
defaultBranchRef {{
|
|
|
|
name
|
2019-04-12 13:26:02 +00:00
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
id
|
|
|
|
sshUrl
|
2019-04-12 13:26:02 +00:00
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
'''
|
|
|
|
|
|
|
|
query = _QUERY.format(owner=self._owner, name=self._name)
|
|
|
|
return self._run(query)['repository']
|
|
|
|
|
|
|
|
def get_members(self):
|
2019-04-12 13:26:02 +00:00
|
|
|
'''Get all team members for organization
|
|
|
|
|
|
|
|
Returns:
|
2020-06-29 12:13:19 +00:00
|
|
|
members: a map of members' logins to ids
|
|
|
|
'''
|
|
|
|
|
|
|
|
_QUERY = '''
|
|
|
|
organization(login: "{organization}") {{
|
|
|
|
team(slug: "{team}") {{
|
|
|
|
members(first: {max_page_size} {next}) {{
|
|
|
|
pageInfo {{
|
|
|
|
hasNextPage
|
|
|
|
endCursor
|
|
|
|
}}
|
|
|
|
nodes {{
|
|
|
|
id
|
|
|
|
login
|
|
|
|
}}
|
|
|
|
}}
|
|
|
|
}}
|
|
|
|
}}
|
2019-04-12 13:26:02 +00:00
|
|
|
'''
|
2020-06-29 12:13:19 +00:00
|
|
|
|
|
|
|
members = {}
|
2019-04-12 13:26:02 +00:00
|
|
|
not_end = True
|
2020-06-29 12:13:19 +00:00
|
|
|
query = _QUERY.format(organization=self._owner, team=self._team,
|
|
|
|
max_page_size=self._max_page_size,
|
|
|
|
next='')
|
2019-04-12 13:26:02 +00:00
|
|
|
|
|
|
|
while not_end:
|
|
|
|
result = self._run(query)['organization']['team']
|
|
|
|
if result is None:
|
|
|
|
break
|
|
|
|
result = result['members']
|
|
|
|
not_end = result['pageInfo']['hasNextPage']
|
2020-06-29 12:13:19 +00:00
|
|
|
query = _QUERY.format(organization=self._owner, team=self._team,
|
|
|
|
max_page_size=self._max_page_size,
|
|
|
|
next='after: "{}"'.format(result["pageInfo"]["endCursor"]))
|
2019-04-12 13:26:02 +00:00
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
members += dict([(node['login'], node['id']) for node in result['nodes']])
|
2019-04-12 13:26:02 +00:00
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
return members
|
2019-04-08 15:33:05 +00:00
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
def get_pull_request(self, number):
|
|
|
|
_QUERY = '''
|
|
|
|
repository(owner: "{owner}" name: "{name}") {{
|
|
|
|
pullRequest(number: {number}) {{
|
|
|
|
{pull_request_data}
|
2019-04-11 15:08:25 +00:00
|
|
|
}}
|
|
|
|
}}
|
|
|
|
'''
|
2019-04-08 19:28:08 +00:00
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
query = _QUERY.format(owner=self._owner, name=self._name, number=number,
|
2020-07-22 01:05:46 +00:00
|
|
|
pull_request_data=self._PULL_REQUEST, min_page_size=self._min_page_size)
|
2020-06-29 12:13:19 +00:00
|
|
|
return self._run(query)['repository']['pullRequest']
|
2019-04-08 15:33:05 +00:00
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
def find_pull_request(self, base, head):
|
|
|
|
_QUERY = '''
|
|
|
|
repository(owner: "{owner}" name: "{name}") {{
|
|
|
|
pullRequests(first: {min_page_size} baseRefName: "{base}" headRefName: "{head}") {{
|
2019-04-12 13:26:02 +00:00
|
|
|
nodes {{
|
2020-06-29 12:13:19 +00:00
|
|
|
{pull_request_data}
|
2019-04-12 13:26:02 +00:00
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
totalCount
|
2019-04-12 13:26:02 +00:00
|
|
|
}}
|
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
'''
|
2019-04-12 13:26:02 +00:00
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
query = _QUERY.format(owner=self._owner, name=self._name, base=base, head=head,
|
2020-07-22 01:05:46 +00:00
|
|
|
pull_request_data=self._PULL_REQUEST, min_page_size=self._min_page_size)
|
2020-06-29 12:13:19 +00:00
|
|
|
result = self._run(query)['repository']['pullRequests']
|
|
|
|
if result['totalCount'] > 0:
|
|
|
|
return result['nodes'][0]
|
|
|
|
else:
|
|
|
|
return {}
|
2019-04-12 13:26:02 +00:00
|
|
|
|
2020-12-16 22:48:21 +00:00
|
|
|
def find_pull_requests(self, label_name):
|
|
|
|
'''
|
|
|
|
Get all pull-requests filtered by label name
|
|
|
|
'''
|
|
|
|
_QUERY = '''
|
|
|
|
repository(owner: "{owner}" name: "{name}") {{
|
2020-12-21 17:44:09 +00:00
|
|
|
pullRequests(first: {min_page_size} labels: "{label_name}") {{
|
2020-12-16 22:48:21 +00:00
|
|
|
nodes {{
|
|
|
|
{pull_request_data}
|
|
|
|
}}
|
|
|
|
}}
|
|
|
|
}}
|
|
|
|
'''
|
|
|
|
|
2020-12-21 17:44:09 +00:00
|
|
|
query = _QUERY.format(owner=self._owner, name=self._name, label_name=label_name,
|
|
|
|
pull_request_data=self._PULL_REQUEST, min_page_size=self._min_page_size)
|
2020-12-16 22:48:21 +00:00
|
|
|
return self._run(query)['repository']['pullRequests']['nodes']
|
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
    def get_pull_requests(self, before_commit):
        '''
        Get all merged pull-requests from the HEAD of default branch to the last commit (excluding)

        Args:
            before_commit (string): commit oid at which to stop walking the
                default-branch history (that commit itself is excluded)

        Returns:
            list of pull request objects merged into the default branch.
        '''
        _QUERY = '''
            repository(owner: "{owner}" name: "{name}") {{
                defaultBranchRef {{
                    target {{
                        ... on Commit {{
                            history(first: {max_page_size} {next}) {{
                                pageInfo {{
                                    hasNextPage
                                    endCursor
                                }}
                                nodes {{
                                    oid
                                    associatedPullRequests(first: {min_page_size}) {{
                                        totalCount
                                        nodes {{
                                            ... on PullRequest {{
                                                {pull_request_data}

                                                labels(first: {min_page_size}) {{
                                                    totalCount
                                                    pageInfo {{
                                                        hasNextPage
                                                        endCursor
                                                    }}
                                                    nodes {{
                                                        name
                                                        color
                                                    }}
                                                }}
                                            }}
                                        }}
                                    }}
                                }}
                            }}
                        }}
                    }}
                }}
            }}
        '''

        pull_requests = []
        not_end = True
        query = _QUERY.format(owner=self._owner, name=self._name,
                              max_page_size=self._max_page_size,
                              min_page_size=self._min_page_size,
                              pull_request_data=self._PULL_REQUEST,
                              next='')

        # Walk the default-branch commit history page by page; stop either
        # when the API has no more pages or when `before_commit` is reached
        # (the inner loop clears `not_end` and breaks in that case).
        while not_end:
            result = self._run(query)['repository']['defaultBranchRef']['target']['history']
            not_end = result['pageInfo']['hasNextPage']
            # Prepare the next page's query up front; it is only used if
            # the inner loop does not terminate the walk.
            query = _QUERY.format(owner=self._owner, name=self._name,
                                  max_page_size=self._max_page_size,
                                  min_page_size=self._min_page_size,
                                  pull_request_data=self._PULL_REQUEST,
                                  next='after: "{}"'.format(result["pageInfo"]["endCursor"]))

            for commit in result['nodes']:
                # FIXME: maybe include `before_commit`?
                if str(commit['oid']) == str(before_commit):
                    not_end = False
                    break

                # TODO: fetch all pull-requests that were merged in a single commit.
                # Only the first min_page_size associated PRs are fetched, so
                # fail loudly if a commit has more than that.
                assert commit['associatedPullRequests']['totalCount'] <= self._min_page_size

                for pull_request in commit['associatedPullRequests']['nodes']:
                    # Keep only PRs that target this repository's default
                    # branch and whose merge commit is this history commit
                    # (filters out PRs merely referencing the commit).
                    if(pull_request['baseRepository']['nameWithOwner'] == '{}/{}'.format(self._owner, self._name) and
                       pull_request['baseRefName'] == self.default_branch and
                       pull_request['mergeCommit']['oid'] == commit['oid']):
                        pull_requests.append(pull_request)

        return pull_requests
|
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
def create_pull_request(self, source, target, title, description="", draft=False, can_modify=True):
|
|
|
|
_QUERY = '''
|
|
|
|
createPullRequest(input: {{
|
|
|
|
baseRefName: "{target}",
|
|
|
|
headRefName: "{source}",
|
|
|
|
repositoryId: "{id}",
|
|
|
|
title: "{title}",
|
|
|
|
body: "{body}",
|
|
|
|
draft: {draft},
|
|
|
|
maintainerCanModify: {modify}
|
|
|
|
}}) {{
|
|
|
|
pullRequest {{
|
|
|
|
{pull_request_data}
|
2019-09-19 18:01:47 +00:00
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
}}
|
|
|
|
'''
|
|
|
|
|
|
|
|
query = _QUERY.format(target=target, source=source, id=self._id, title=title, body=description,
|
|
|
|
draft="true" if draft else "false", modify="true" if can_modify else "false",
|
2020-07-22 01:05:46 +00:00
|
|
|
pull_request_data=self._PULL_REQUEST)
|
2020-06-29 12:13:19 +00:00
|
|
|
return self._run(query, is_mutation=True)['createPullRequest']['pullRequest']
|
|
|
|
|
|
|
|
def merge_pull_request(self, id):
|
|
|
|
_QUERY = '''
|
|
|
|
mergePullRequest(input: {{
|
|
|
|
pullRequestId: "{id}"
|
|
|
|
}}) {{
|
|
|
|
pullRequest {{
|
|
|
|
{pull_request_data}
|
2019-09-19 18:01:47 +00:00
|
|
|
}}
|
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
'''
|
|
|
|
|
2020-07-22 01:05:46 +00:00
|
|
|
query = _QUERY.format(id=id, pull_request_data=self._PULL_REQUEST)
|
2020-06-29 12:13:19 +00:00
|
|
|
return self._run(query, is_mutation=True)['mergePullRequest']['pullRequest']
|
|
|
|
|
|
|
|
# FIXME: figure out how to add more assignees at once
|
|
|
|
def add_assignee(self, pr, assignee):
|
|
|
|
_QUERY = '''
|
|
|
|
addAssigneesToAssignable(input: {{
|
|
|
|
assignableId: "{id1}",
|
|
|
|
assigneeIds: "{id2}"
|
|
|
|
}}) {{
|
|
|
|
clientMutationId
|
|
|
|
}}
|
|
|
|
'''
|
|
|
|
|
|
|
|
query = _QUERY.format(id1=pr['id'], id2=assignee['id'])
|
|
|
|
self._run(query, is_mutation=True)
|
|
|
|
|
2019-09-19 18:01:47 +00:00
|
|
|
def set_label(self, pull_request, label_name):
|
2020-06-29 12:13:19 +00:00
|
|
|
'''
|
|
|
|
Set label by name to the pull request
|
2019-09-19 18:01:47 +00:00
|
|
|
|
|
|
|
Args:
|
|
|
|
pull_request: JSON object returned by `get_pull_requests()`
|
|
|
|
label_name (string): label name
|
|
|
|
'''
|
2020-06-29 12:13:19 +00:00
|
|
|
|
|
|
|
_GET_LABEL = '''
|
|
|
|
repository(owner: "{owner}" name: "{name}") {{
|
|
|
|
labels(first: {max_page_size} {next} query: "{label_name}") {{
|
|
|
|
pageInfo {{
|
|
|
|
hasNextPage
|
|
|
|
endCursor
|
|
|
|
}}
|
|
|
|
nodes {{
|
|
|
|
id
|
|
|
|
name
|
|
|
|
color
|
|
|
|
}}
|
|
|
|
}}
|
|
|
|
}}
|
|
|
|
'''
|
|
|
|
|
|
|
|
_SET_LABEL = '''
|
|
|
|
addLabelsToLabelable(input: {{
|
|
|
|
labelableId: "{pr_id}",
|
|
|
|
labelIds: "{label_id}"
|
|
|
|
}}) {{
|
|
|
|
clientMutationId
|
|
|
|
}}
|
|
|
|
'''
|
|
|
|
|
2019-09-19 18:01:47 +00:00
|
|
|
labels = []
|
|
|
|
not_end = True
|
2020-06-29 12:13:19 +00:00
|
|
|
query = _GET_LABEL.format(owner=self._owner, name=self._name, label_name=label_name,
|
|
|
|
max_page_size=self._max_page_size,
|
|
|
|
next='')
|
2019-09-19 18:01:47 +00:00
|
|
|
|
|
|
|
while not_end:
|
|
|
|
result = self._run(query)['repository']['labels']
|
|
|
|
not_end = result['pageInfo']['hasNextPage']
|
2020-06-29 12:13:19 +00:00
|
|
|
query = _GET_LABEL.format(owner=self._owner, name=self._name, label_name=label_name,
|
|
|
|
max_page_size=self._max_page_size,
|
|
|
|
next='after: "{}"'.format(result["pageInfo"]["endCursor"]))
|
2019-09-19 18:01:47 +00:00
|
|
|
|
|
|
|
labels += [label for label in result['nodes']]
|
|
|
|
|
|
|
|
if not labels:
|
|
|
|
return
|
|
|
|
|
2020-07-22 01:05:46 +00:00
|
|
|
query = _SET_LABEL.format(pr_id=pull_request['id'], label_id=labels[0]['id'])
|
2019-09-19 18:01:47 +00:00
|
|
|
self._run(query, is_mutation=True)
|
|
|
|
|
|
|
|
def _run(self, query, is_mutation=False):
|
2019-04-11 15:08:25 +00:00
|
|
|
from requests.adapters import HTTPAdapter
|
|
|
|
from urllib3.util.retry import Retry
|
|
|
|
|
|
|
|
def requests_retry_session(
|
|
|
|
retries=3,
|
|
|
|
backoff_factor=0.3,
|
|
|
|
status_forcelist=(500, 502, 504),
|
|
|
|
session=None,
|
|
|
|
):
|
|
|
|
session = session or requests.Session()
|
|
|
|
retry = Retry(
|
|
|
|
total=retries,
|
|
|
|
read=retries,
|
|
|
|
connect=retries,
|
|
|
|
backoff_factor=backoff_factor,
|
|
|
|
status_forcelist=status_forcelist,
|
|
|
|
)
|
|
|
|
adapter = HTTPAdapter(max_retries=retry)
|
|
|
|
session.mount('http://', adapter)
|
|
|
|
session.mount('https://', adapter)
|
|
|
|
return session
|
|
|
|
|
2020-06-29 12:13:19 +00:00
|
|
|
headers = {'Authorization': 'bearer {}'.format(self._token)}
|
2019-09-19 18:01:47 +00:00
|
|
|
if is_mutation:
|
2020-06-29 12:13:19 +00:00
|
|
|
query = '''
|
2019-09-19 18:01:47 +00:00
|
|
|
mutation {{
|
|
|
|
{query}
|
2019-09-06 12:42:15 +00:00
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
'''.format(query=query)
|
2019-09-19 18:01:47 +00:00
|
|
|
else:
|
2020-06-29 12:13:19 +00:00
|
|
|
query = '''
|
2019-09-19 18:01:47 +00:00
|
|
|
query {{
|
|
|
|
{query}
|
|
|
|
rateLimit {{
|
|
|
|
cost
|
|
|
|
remaining
|
|
|
|
}}
|
|
|
|
}}
|
2020-06-29 12:13:19 +00:00
|
|
|
'''.format(query=query)
|
2020-03-16 15:27:07 +00:00
|
|
|
|
|
|
|
while True:
|
|
|
|
request = requests_retry_session().post('https://api.github.com/graphql', json={'query': query}, headers=headers)
|
|
|
|
if request.status_code == 200:
|
|
|
|
result = request.json()
|
|
|
|
if 'errors' in result:
|
2020-08-08 01:21:04 +00:00
|
|
|
raise Exception('Errors occurred: {}\nOriginal query: {}'.format(result["errors"], query))
|
2020-03-16 15:27:07 +00:00
|
|
|
|
|
|
|
if not is_mutation:
|
|
|
|
import inspect
|
|
|
|
caller = inspect.getouterframes(inspect.currentframe(), 2)[1][3]
|
2020-10-02 16:54:07 +00:00
|
|
|
if caller not in list(self.api_costs.keys()):
|
2020-03-16 15:27:07 +00:00
|
|
|
self.api_costs[caller] = 0
|
|
|
|
self.api_costs[caller] += result['data']['rateLimit']['cost']
|
|
|
|
|
|
|
|
return result['data']
|
|
|
|
else:
|
|
|
|
import json
|
2020-06-29 12:13:19 +00:00
|
|
|
raise Exception('Query failed with code {code}:\n{json}'.format(code=request.status_code, json=json.dumps(request.json(), indent=4)))
|