#!/usr/bin/env python3
import json
import logging
import os
import re
from typing import Dict, List, Set, Union
from urllib.parse import quote

# isort: off
# for some reason this line moves to the end
from unidiff import PatchSet  # type: ignore

# isort: on

from build_download_helper import get_gh_api
from env_helper import (
    GITHUB_EVENT_PATH,
    GITHUB_REPOSITORY,
    GITHUB_RUN_URL,
    GITHUB_SERVER_URL,
)

SKIP_MERGEABLE_CHECK_LABEL = "skip mergeable check"
NeedsDataType = Dict[str, Dict[str, Union[str, Dict[str, str]]]]
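# Illustrative shape (an assumption, mirroring the GitHub Actions `needs` context):
#   {"build_job": {"result": "success", "outputs": {"data": "..."}}}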

DIFF_IN_DOCUMENTATION_EXT = [
    ".html",
    ".md",
    ".yml",
    ".txt",
    ".css",
    ".js",
    ".xml",
    ".ico",
    ".conf",
    ".svg",
    ".png",
    ".jpg",
    ".py",
    ".sh",
    ".json",
]
RETRY_SLEEP = 0


class EventType:
    UNKNOWN = 0
    PUSH = 1
    PULL_REQUEST = 2
    SCHEDULE = 3
    DISPATCH = 4


def get_pr_for_commit(sha, ref):
    if not ref:
        return None
    try_get_pr_url = (
        f"https://api.github.com/repos/{GITHUB_REPOSITORY}/commits/{sha}/pulls"
    )
    try:
        response = get_gh_api(try_get_pr_url, sleep=RETRY_SLEEP)
        data = response.json()
        our_prs = []  # type: List[Dict]
        if len(data) > 1:
            print("Got more than one pr for commit", sha)
        for pr in data:
            # We need to check if the PR is created in our repo, because
            # https://github.com/kaynewu/ClickHouse/pull/2
            # has broken our PR search once in a while
            if pr["base"]["repo"]["full_name"] != GITHUB_REPOSITORY:
                continue
            # refs for pushes look like refs/heads/XX
            # refs for PRs look like XX
            if pr["head"]["ref"] in ref:
                return pr
            our_prs.append(pr)
        print(
            f"Cannot find PR with required ref {ref}, sha {sha} - returning first one"
        )
        first_pr = our_prs[0]
        return first_pr
    except Exception as ex:
        print(f"Cannot fetch PR info from commit {ref}, {sha}", ex)
    return None
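
# Illustrative call (hypothetical sha and ref) to map a pushed commit back to its PR:
#   pr = get_pr_for_commit("deadbeefcafe", "refs/heads/backport/23.8/12345")
#   pr_number = pr["number"] if pr else 0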


class PRInfo:
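    # A stand-in payload mimicking the minimal fields of a push event; used when
    # no GITHUB_EVENT_PATH file is available (e.g. when running locally).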
    default_event = {
        "commits": 1,
        "head_commit": {"message": "commit_message"},
        "before": "HEAD~",
        "after": "HEAD",
        "ref": None,
    }

    def __init__(
        self,
        github_event=None,
        need_orgs=False,
        need_changed_files=False,
        pr_event_from_api=False,
    ):
        if not github_event:
            if GITHUB_EVENT_PATH:
                with open(GITHUB_EVENT_PATH, "r", encoding="utf-8") as event_file:
                    github_event = json.load(event_file)
            else:
                github_event = PRInfo.default_event.copy()
        self.event = github_event
        self.changed_files = set()  # type: Set[str]
        self.changed_files_requested = False
        self.body = ""
        self.diff_urls = []  # type: List[str]
        # release_pr and merged_pr are used for the additional docker images cache
        self.release_pr = 0
        self.merged_pr = 0
        self.event_type = EventType.UNKNOWN
        ref = github_event.get("ref", "refs/heads/master")
        if ref and ref.startswith("refs/heads/"):
            ref = ref[11:]

        # workflow completed event, used for PRs only
        if "action" in github_event and github_event["action"] == "completed":
            self.sha = github_event["workflow_run"]["head_sha"]  # type: str
            prs_for_sha = get_gh_api(
                f"https://api.github.com/repos/{GITHUB_REPOSITORY}/commits/{self.sha}"
                "/pulls",
                sleep=RETRY_SLEEP,
            ).json()
            if len(prs_for_sha) != 0:
                github_event["pull_request"] = prs_for_sha[0]

        if "pull_request" in github_event:  # pull request and other similar events
            self.event_type = EventType.PULL_REQUEST
            self.number = github_event["pull_request"]["number"]  # type: int
            if pr_event_from_api:
                try:
                    response = get_gh_api(
                        f"https://api.github.com/repos/{GITHUB_REPOSITORY}"
                        f"/pulls/{self.number}",
                        sleep=RETRY_SLEEP,
                    )
                    github_event["pull_request"] = response.json()
                except Exception as e:
                    logging.warning(
                        "Unable to get pull request event %s from API, "
                        "fallback to received event. Exception: %s",
                        self.number,
                        e,
                    )

            if "after" in github_event:
                self.sha = github_event["after"]
            else:
                self.sha = github_event["pull_request"]["head"]["sha"]

            repo_prefix = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
            self.task_url = GITHUB_RUN_URL

            self.repo_full_name = GITHUB_REPOSITORY
            self.commit_html_url = f"{repo_prefix}/commits/{self.sha}"
            self.pr_html_url = f"{repo_prefix}/pull/{self.number}"

            # master or backport/xx.x/xxxxx - where the PR will be merged
            self.base_ref = github_event["pull_request"]["base"]["ref"]  # type: str
            # ClickHouse/ClickHouse
            self.base_name = github_event["pull_request"]["base"]["repo"][
                "full_name"
            ]  # type: str
            # any_branch-name - the name of the working branch
            self.head_ref = github_event["pull_request"]["head"]["ref"]  # type: str
            # UserName/ClickHouse or ClickHouse/ClickHouse
            self.head_name = github_event["pull_request"]["head"]["repo"][
                "full_name"
            ]  # type: str
            self.body = github_event["pull_request"]["body"]
            self.labels = {
                label["name"] for label in github_event["pull_request"]["labels"]
            }  # type: Set[str]

            self.user_login = github_event["pull_request"]["user"]["login"]  # type: str
            self.user_orgs = set()  # type: Set[str]
            if need_orgs:
                user_orgs_response = get_gh_api(
                    github_event["pull_request"]["user"]["organizations_url"],
                    sleep=RETRY_SLEEP,
                )
                if user_orgs_response.ok:
                    response_json = user_orgs_response.json()
                    self.user_orgs = set(org["id"] for org in response_json)

            self.diff_urls.append(self.compare_pr_url(github_event["pull_request"]))

        elif "commits" in github_event:
            self.event_type = EventType.PUSH
            # `head_commit` always comes with `commits`
            commit_message = github_event["head_commit"]["message"]  # type: str
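            # A merge commit message looks like "Merge pull request #61234 from user/branch"
            # (hypothetical number); token [3] of the split is "#61234", and the
            # leading "#" is stripped before converting to an int.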
            if commit_message.startswith("Merge pull request #"):
                merged_pr = commit_message.split(maxsplit=4)[3]
                try:
                    self.merged_pr = int(merged_pr[1:])
                except ValueError:
                    logging.error("Failed to convert %s to integer", merged_pr)
            self.sha = github_event["after"]
            pull_request = get_pr_for_commit(self.sha, github_event["ref"])
            repo_prefix = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
            self.task_url = GITHUB_RUN_URL
            self.commit_html_url = f"{repo_prefix}/commits/{self.sha}"
            self.repo_full_name = GITHUB_REPOSITORY
            if pull_request is None or pull_request["state"] == "closed":
                # it's a merged PR to master
                self.number = 0
                self.labels = set()
                self.pr_html_url = f"{repo_prefix}/commits/{ref}"
                self.base_ref = ref
                self.base_name = self.repo_full_name
                self.head_ref = ref
                self.head_name = self.repo_full_name
                self.diff_urls.append(
                    self.compare_url(github_event["before"], self.sha)
                )
            else:
                self.number = pull_request["number"]
                self.labels = {label["name"] for label in pull_request["labels"]}

                self.base_ref = pull_request["base"]["ref"]
                self.base_name = pull_request["base"]["repo"]["full_name"]
                self.head_ref = pull_request["head"]["ref"]
                self.head_name = pull_request["head"]["repo"]["full_name"]
                self.pr_html_url = pull_request["html_url"]
                if "pr-backport" in self.labels:
                    # head1...head2 gives changes in head2 since merge base.
                    # That's why we need {self.head_ref}...master to get
                    # files changed in upstream AND master...{self.head_ref}
                    # to get files changed in the current HEAD
                    self.diff_urls.append(
                        self.compare_url(
                            pull_request["base"]["repo"]["default_branch"],
                            pull_request["head"]["label"],
                        )
                    )
                    self.diff_urls.append(
                        self.compare_url(
                            pull_request["head"]["label"],
                            pull_request["base"]["repo"]["default_branch"],
                        )
                    )
                    # Get release PR number.
                    self.release_pr = get_pr_for_commit(self.base_ref, self.base_ref)[
                        "number"
                    ]
                else:
                    self.diff_urls.append(self.compare_pr_url(pull_request))
                    if "release" in self.labels:
                        # For release PRs we must get not only the files changed in
                        # the PR itself, but also the files changed since we branched out
                        self.diff_urls.append(
                            self.compare_url(
                                pull_request["head"]["label"],
                                pull_request["base"]["repo"]["default_branch"],
                            )
                        )
        else:
            if "schedule" in github_event:
                self.event_type = EventType.SCHEDULE
            else:
                # assume this is a dispatch
                self.event_type = EventType.DISPATCH
            print("event.json does not match pull_request or push:")
            print(json.dumps(github_event, sort_keys=True, indent=4))
            self.sha = os.getenv(
                "GITHUB_SHA", "0000000000000000000000000000000000000000"
            )
            self.number = 0
            self.labels = set()
            repo_prefix = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
            self.task_url = GITHUB_RUN_URL
            self.commit_html_url = f"{repo_prefix}/commits/{self.sha}"
            self.repo_full_name = GITHUB_REPOSITORY
            self.pr_html_url = f"{repo_prefix}/commits/{ref}"
            self.base_ref = ref
            self.base_name = self.repo_full_name
            self.head_ref = ref
            self.head_name = self.repo_full_name

        if need_changed_files:
            self.fetch_changed_files()

    def is_master(self) -> bool:
        return self.number == 0 and self.head_ref == "master"

    def is_release(self) -> bool:
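        # The pattern matches release-branch names like "23.8" or "24.3"
        # (hypothetical examples) but not "master" or "20.12".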
        return self.number == 0 and bool(
            re.match(r"^2[1-9]\.[1-9][0-9]*$", self.head_ref)
        )

    def is_release_branch(self) -> bool:
        return self.number == 0

    def is_scheduled(self):
        return self.event_type == EventType.SCHEDULE

    def is_dispatched(self):
        return self.event_type == EventType.DISPATCH

    def compare_pr_url(self, pr_object: dict) -> str:
        return self.compare_url(pr_object["base"]["label"], pr_object["head"]["label"])

    @staticmethod
    def compare_url(first: str, second: str) -> str:
        """`first` and `second` are URL-encoded so that '#' and other symbols don't break the URL"""
        return (
            "https://api.github.com/repos/"
            f"{GITHUB_REPOSITORY}/compare/{quote(first)}...{quote(second)}"
        )

    def fetch_changed_files(self):
        if not getattr(self, "diff_urls", False):
            raise TypeError("The event does not have diff URLs")

        for diff_url in self.diff_urls:
            response = get_gh_api(
                diff_url,
                sleep=RETRY_SLEEP,
                headers={"Accept": "application/vnd.github.v3.diff"},
            )
            response.raise_for_status()
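            # The v3.diff Accept header makes the API return a unified diff;
            # unidiff parses it and the touched file paths are collected below.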
            diff_object = PatchSet(response.text)
            self.changed_files.update({f.path for f in diff_object})
        self.changed_files_requested = True
        print(f"Fetched info about {len(self.changed_files)} changed files")

    def get_dict(self):
        return {
            "sha": self.sha,
            "number": self.number,
            "labels": self.labels,
            "user_login": self.user_login,
            "user_orgs": self.user_orgs,
        }

    def has_changes_in_documentation(self) -> bool:
        if not self.changed_files_requested:
            self.fetch_changed_files()

        if not self.changed_files:
            return True

        for f in self.changed_files:
            _, ext = os.path.splitext(f)
            path_in_docs = f.startswith("docs/")
            if (
                ext in DIFF_IN_DOCUMENTATION_EXT and path_in_docs
            ) or "docker/docs" in f:
                return True
        return False

    def has_changes_in_documentation_only(self) -> bool:
        """
        Checks whether the changes are docs-related only, without any other changes
        FIXME: avoid hardcoding filenames here
        """
        if not self.changed_files_requested:
            self.fetch_changed_files()

        if not self.changed_files:
            # if no changes at all return False
            return False

        for f in self.changed_files:
            _, ext = os.path.splitext(f)
            path_in_docs = f.startswith("docs/")
            if not (
                (ext in DIFF_IN_DOCUMENTATION_EXT and path_in_docs)
                or "docker/docs" in f
                or "docs_check.py" in f
                or "aspell-dict.txt" in f
                or ext == ".md"
            ):
                return False
        return True

    def has_changes_in_submodules(self):
        if not self.changed_files_requested:
            self.fetch_changed_files()

        if not self.changed_files:
            return True

        for f in self.changed_files:
            if "contrib/" in f:
                return True
        return False


class FakePRInfo:
    def __init__(self):
        self.number = 11111
        self.sha = "xxxxxxxxxxxxxxxxxx"
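

# Minimal usage sketch (hypothetical, outside of any specific workflow): build the
# info object from the current event file and inspect the change set.
#
#   pr_info = PRInfo(need_changed_files=True)
#   if pr_info.has_changes_in_documentation_only():
#       print("docs-only change in PR", pr_info.number)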