#!/usr/bin/env python
"""Helper for GitHub API requests"""
import logging
import re
from datetime import date, datetime, timedelta
from os import path as p
from pathlib import Path
from time import sleep
from typing import Any, Callable, List, Optional, Tuple, Union

import github
import requests

# explicit reimport
# pylint: disable=useless-import-alias
from github.AuthenticatedUser import AuthenticatedUser
from github.GithubException import (
    RateLimitExceededException as RateLimitExceededException,
)
from github.Issue import Issue as Issue
from github.NamedUser import NamedUser as NamedUser
from github.PullRequest import PullRequest as PullRequest
from github.Repository import Repository as Repository

# pylint: enable=useless-import-alias

CACHE_PATH = p.join(p.dirname(p.realpath(__file__)), "gh_cache")

logger = logging.getLogger(__name__)

PullRequests = List[PullRequest]
Issues = List[Issue]


class GitHub(github.Github):
    def __init__(self, *args, create_cache_dir=True, **kwargs):
        # Define meta attribute and apply setter logic
        self._cache_path = Path(CACHE_PATH)
        if create_cache_dir:
            self.cache_path = self.cache_path
        if not kwargs.get("per_page"):
            kwargs["per_page"] = 100
        # And initialize the parent Github client
        super().__init__(*args, **kwargs)
        self._retries = 0

    # pylint: disable=signature-differs
    def search_issues(self, *args, **kwargs) -> Issues:  # type: ignore
        """Wrapper around the search method with throttling and splitting by date.

        We split only by the first splittable argument encountered."""
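        # A usage sketch with hypothetical values: a two-element iterable of dates
        # is converted to GitHub's range syntax, e.g.
        #   gh.search_issues(query="repo:example/repo is:pr", created=[date(2024, 1, 1), date(2024, 6, 1)])
        # becomes the qualifier created:2024-01-01..2024-06-01, and the request is
        # split in two when it hits the 1000-result search limit.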
        splittable_arg = ""
        splittable_value = []
        for arg, value in kwargs.items():
            if arg in ["closed", "created", "merged", "updated"]:
                if hasattr(value, "__iter__") and not isinstance(value, str):
                    assert all(isinstance(v, (date, datetime)) for v in value)
                    assert len(value) == 2
                    kwargs[arg] = f"{value[0].isoformat()}..{value[1].isoformat()}"
                    if not splittable_arg:
                        # We split only by the first splittable argument we meet
                        middle_value = value[0] + (value[1] - value[0]) / 2
                        if middle_value in value:
                            # When the middle value is already one of the iterable's
                            # endpoints, we can't use it to split by dates later
                            continue
                        splittable_arg = arg
                        splittable_value = value
                    continue
                assert isinstance(value, (date, datetime, str))

        inter_result = []  # type: Issues
        exception = RateLimitExceededException(0)
        for i in range(self.retries):
            try:
                logger.debug("Search issues, args=%s, kwargs=%s", args, kwargs)
                result = super().search_issues(*args, **kwargs)
                if result.totalCount == 1000 and splittable_arg:
                    # The hard limit is 1000. If the request is splittable, we make
                    # two sub-requests with smaller time frames
                    logger.debug(
                        "The search result contains exactly 1000 results, "
                        "splitting %s=%s by middle point %s",
                        splittable_arg,
                        kwargs[splittable_arg],
                        middle_value,
                    )
                    kwargs[splittable_arg] = [splittable_value[0], middle_value]
                    inter_result.extend(self.search_issues(*args, **kwargs))
                    if isinstance(middle_value, date):
                        # When middle_value is a date, 2022-01-01..2022-01-03
                        # is split into 2022-01-01..2022-01-02 and
                        # 2022-01-02..2022-01-03, so we would get the results
                        # for 2022-01-02 twice. Instead, we split it into
                        # 2022-01-01..2022-01-02 and 2022-01-03..2022-01-03.
                        # 2022-01-01..2022-01-02 isn't split further, see the
                        # splittable_arg assignment in the kwargs.items loop above
                        middle_value += timedelta(days=1)
                    kwargs[splittable_arg] = [middle_value, splittable_value[1]]
                    inter_result.extend(self.search_issues(*args, **kwargs))
                    return inter_result

                inter_result.extend(result)
                return inter_result
            except RateLimitExceededException as e:
                if i == self.retries - 1:
                    exception = e
                self.sleep_on_rate_limit()

        raise exception

    # pylint: enable=signature-differs
    def get_pulls_from_search(self, *args: Any, **kwargs: Any) -> PullRequests:
        """The search API actually returns issues, so we need to fetch PullRequests"""
        progress_func = kwargs.pop(
            "progress_func", lambda x: x
        )  # type: Callable[[Issues], Issues]
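        # progress_func is assumed to wrap an iterable and yield its items
        # unchanged, e.g. tqdm.tqdm (a hypothetical choice); the default is a
        # no-op pass-through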
        issues = self.search_issues(*args, **kwargs)
        repos = {}
        prs = []  # type: PullRequests
        for issue in progress_func(issues):
            # See https://github.com/PyGithub/PyGithub/issues/2202,
            # obj._rawData doesn't spend additional API requests
            # pylint: disable=protected-access
            repo_url = issue._rawData["repository_url"]
            if repo_url not in repos:
                repos[repo_url] = issue.repository
            prs.append(
                self.get_pull_cached(repos[repo_url], issue.number, issue.updated_at)
            )
        return prs

    def get_release_pulls(self, repo_name: str) -> PullRequests:
        return self.get_pulls_from_search(
            query=f"type:pr repo:{repo_name} is:open",
            sort="created",
            order="asc",
            label="release",
        )

    def sleep_on_rate_limit(self) -> None:
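        # get_rate_limit().raw_data is assumed to mirror GitHub's /rate_limit
        # payload: one entry per limit type (e.g. "core", "search") with
        # "remaining" and "reset" fields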
        for limit, data in self.get_rate_limit().raw_data.items():
            if data["remaining"] == 0:
                sleep_time = data["reset"] - int(datetime.now().timestamp()) + 1
                if sleep_time > 0:
                    logger.warning(
                        "Hit the rate limit for '%s' requests, sleeping %s seconds",
                        limit,
                        sleep_time,
                    )
                    sleep(sleep_time)
                return

    def get_pull_cached(
        self, repo: Repository, number: int, obj_updated_at: Optional[datetime] = None
    ) -> PullRequest:
        # clean any special symbols from the repo name, especially '/'
        repo_name = re.sub(r"\W", "_", repo.full_name)
        cache_file = self.cache_path / f"pr-{repo_name}-{number}.pickle"
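        # The pickle cache is keyed by repository and PR number; freshness is
        # decided by _is_cache_updated below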

        if cache_file.is_file():
            is_updated, cached_pr = self._is_cache_updated(cache_file, obj_updated_at)
            if is_updated:
                logger.debug("Getting PR #%s from cache", number)
                return cached_pr  # type: ignore
        logger.debug("Getting PR #%s from API", number)
        for i in range(self.retries):
            try:
                pr = repo.get_pull(number)
                break
            except RateLimitExceededException:
                if i == self.retries - 1:
                    raise
                self.sleep_on_rate_limit()
        logger.debug("Caching PR #%s from API in %s", number, cache_file)
        with open(cache_file, "wb") as prfd:
            self.dump(pr, prfd)  # type: ignore
        return pr

    def get_user_cached(
        self, login: str, obj_updated_at: Optional[datetime] = None
    ) -> Union[AuthenticatedUser, NamedUser]:
        cache_file = self.cache_path / f"user-{login}.pickle"

        if cache_file.is_file():
            is_updated, cached_user = self._is_cache_updated(cache_file, obj_updated_at)
            if is_updated:
                logger.debug("Getting user %s from cache", login)
                return cached_user  # type: ignore
        logger.debug("Getting user %s from API", login)
        for i in range(self.retries):
            try:
                user = self.get_user(login)
                break
            except RateLimitExceededException:
                if i == self.retries - 1:
                    raise
                self.sleep_on_rate_limit()
        logger.debug("Caching user %s from API in %s", login, cache_file)
        with open(cache_file, "wb") as prfd:
            self.dump(user, prfd)  # type: ignore
        return user

    def _get_cached(self, path: Path):  # type: ignore
        with open(path, "rb") as ob_fd:
            return self.load(ob_fd)  # type: ignore

    # pylint: disable=protected-access
    @staticmethod
    def toggle_pr_draft(pr: PullRequest) -> None:
        """The GH REST API does not provide a way to toggle the draft status of a PR"""
        node_id = pr._rawData["node_id"]
        if pr.draft:
            action = (
                "mutation PullRequestReadyForReview($input:MarkPullRequestReadyForReviewInput!)"
                "{markPullRequestReadyForReview(input: $input){pullRequest{id}}}"
            )
        else:
            action = (
                "mutation ConvertPullRequestToDraft($input:ConvertPullRequestToDraftInput!)"
                "{convertPullRequestToDraft(input: $input){pullRequest{id}}}"
            )
        query = {
            "query": action,
            "variables": {"input": {"pullRequestId": node_id}},
        }
        url = f"{pr._requester.base_url}/graphql"
        _, data = pr._requester.requestJsonAndCheck("POST", url, input=query)
        if data.get("data"):
            pr._draft = pr._makeBoolAttribute(not pr.draft)

    # pylint: enable=protected-access

    def _is_cache_updated(
        self, cache_file: Path, obj_updated_at: Optional[datetime]
    ) -> Tuple[bool, object]:
        cached_obj = self._get_cached(cache_file)
        # We don't want cache_updated to always be in the past, for example
        # when the user has not been updated for ages
        cache_updated = max(
            cache_file.stat().st_mtime, cached_obj.updated_at.timestamp()
        )
        if obj_updated_at is None:
            # When we don't know whether the object has been updated or not,
            # we refresh it once per hour
            obj_updated_at = datetime.now() - timedelta(hours=1)
        if obj_updated_at.timestamp() <= cache_updated:
            return True, cached_obj
        return False, cached_obj

    @property
    def cache_path(self) -> Path:
        return self._cache_path

    @cache_path.setter
    def cache_path(self, value: str) -> None:
        self._cache_path = Path(value)
        if self._cache_path.exists():
            assert self._cache_path.is_dir()
        else:
            self._cache_path.mkdir(parents=True)

    @property
    def retries(self):
        if self._retries == 0:
            self._retries = 3
        return self._retries

    @retries.setter
    def retries(self, value: int) -> None:
        assert isinstance(value, int)
        self._retries = value

    # static methods not using pygithub
    @staticmethod
    def cancel_wf(repo, run_id, token, strict=False):
        headers = {"Authorization": f"token {token}"}
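        # GitHub's "cancel a workflow run" REST endpoint; a successful request
        # returns 202 Accepted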
        url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}/cancel"
        try:
            response = requests.post(url, headers=headers, timeout=10)
            response.raise_for_status()
            print(f"NOTE: Workflow [{run_id}] has been cancelled")
        except Exception as ex:
            print("ERROR: Got exception executing wf cancel request", ex)
            if strict:
                raise ex
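

# A minimal usage sketch (illustration only, not part of the original module):
# assumes a token in the GITHUB_TOKEN environment variable and a hypothetical
# "example-org/example-repo" repository; the token is passed positionally,
# which the PyGithub versions this module targets accept.
if __name__ == "__main__":
    import os

    logging.basicConfig(level=logging.DEBUG)
    gh = GitHub(os.environ["GITHUB_TOKEN"])
    gh.retries = 5  # override the default of 3 retries on rate-limit errors
    for pull_request in gh.get_release_pulls("example-org/example-repo"):
        print(pull_request.number, pull_request.title)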