Merge pull request #33591 from ClickHouse/check-description

Check description

commit 9c8b58a7f0
@@ -41,6 +41,7 @@ class PRInfo:
             github_event = {'commits': 1, 'after': 'HEAD', 'ref': None}
         self.event = github_event
         self.changed_files = set([])
+        self.body = ""
         ref = github_event.get("ref", "refs/head/master")
         if ref.startswith('refs/heads/'):
             ref = ref[11:]
@@ -70,6 +71,7 @@ class PRInfo:
             self.base_name = github_event['pull_request']['base']['repo']['full_name']
             self.head_ref = github_event['pull_request']['head']['ref']
             self.head_name = github_event['pull_request']['head']['repo']['full_name']
+            self.body = github_event['pull_request']['body']

             if labels_from_api:
                 response = requests.get(f"https://api.github.com/repos/{GITHUB_REPOSITORY}/issues/{self.number}/labels")
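For context on the labels_from_api branch in the hunk above, here is a hedged sketch of what consuming that endpoint looks like on its own. The repository name and PR number are taken from this page; how PRInfo actually stores the result is not shown in the hunk, so the set comprehension below is illustrative only.

    import requests

    GITHUB_REPOSITORY = "ClickHouse/ClickHouse"
    pr_number = 33591  # the PR this commit merges

    # GET /repos/{owner}/{repo}/issues/{number}/labels returns a JSON array of
    # label objects; the label text lives in the "name" field. Unauthenticated
    # requests work but are subject to GitHub's rate limits.
    response = requests.get(
        f"https://api.github.com/repos/{GITHUB_REPOSITORY}/issues/{pr_number}/labels"
    )
    response.raise_for_status()
    labels = {label["name"] for label in response.json()}
    print(labels)  # e.g. {'can be tested'}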
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 import sys
 import logging
+import re
 from github import Github

 from env_helper import GITHUB_RUN_ID, GITHUB_REPOSITORY, GITHUB_SERVER_URL
@@ -8,10 +9,10 @@ from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
 from commit_status_helper import get_commit

-NAME = 'Run Check (actions)'
+NAME = "Run Check (actions)"

 TRUSTED_ORG_IDS = {
-    7409213, # yandex
+    7409213,  # yandex
     28471076,  # altinity
     54801242,  # clickhouse
 }
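As a side note on where the org IDs checked against TRUSTED_ORG_IDS can come from: the sketch below queries a user's public organization memberships through the standard GitHub REST API. This is illustrative only and not necessarily how PRInfo collects pr_user_orgs; the login is a made-up placeholder.

    import requests

    TRUSTED_ORG_IDS = {7409213, 28471076, 54801242}  # values from the hunk above

    def public_org_ids(login: str) -> set:
        # GET /users/{username}/orgs returns the user's *public* organization
        # memberships; each entry carries a numeric "id".
        response = requests.get(f"https://api.github.com/users/{login}/orgs")
        response.raise_for_status()
        return {org["id"] for org in response.json()}

    # "some-login" is a hypothetical GitHub user name.
    print(bool(public_org_ids("some-login") & TRUSTED_ORG_IDS))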
@@ -22,55 +23,58 @@ DO_NOT_TEST_LABEL = "do not test"
 # Individual trusted contirbutors who are not in any trusted organization.
 # Can be changed in runtime: we will append users that we learned to be in
 # a trusted org, to save GitHub API calls.
-TRUSTED_CONTRIBUTORS = {e.lower() for e in [
-    "achimbab",
-    "adevyatova ", # DOCSUP
-    "Algunenano", # Raúl Marín, Tinybird
-    "AnaUvarova", # DOCSUP
-    "anauvarova", # technical writer, Yandex
-    "annvsh", # technical writer, Yandex
-    "atereh", # DOCSUP
-    "azat",
-    "bharatnc", # Newbie, but already with many contributions.
-    "bobrik", # Seasoned contributor, CloundFlare
-    "BohuTANG",
-    "codyrobert", # Flickerbox engineer
-    "cwurm", # Employee
-    "damozhaeva", # DOCSUP
-    "den-crane",
-    "flickerbox-tom", # Flickerbox
-    "gyuton", # technical writer, Yandex
-    "hagen1778", # Roman Khavronenko, seasoned contributor
-    "hczhcz",
-    "hexiaoting", # Seasoned contributor
-    "ildus", # adjust, ex-pgpro
-    "javisantana", # a Spanish ClickHouse enthusiast, ex-Carto
-    "ka1bi4", # DOCSUP
-    "kirillikoff", # DOCSUP
-    "kitaisreal", # Seasoned contributor
-    "kreuzerkrieg",
-    "lehasm", # DOCSUP
-    "michon470", # DOCSUP
-    "MyroTk", # Tester in Altinity
-    "myrrc", # Michael Kot, Altinity
-    "nikvas0",
-    "nvartolomei",
-    "olgarev", # DOCSUP
-    "otrazhenia", # Yandex docs contractor
-    "pdv-ru", # DOCSUP
-    "podshumok", # cmake expert from QRator Labs
-    "s-mx", # Maxim Sabyanin, former employee, present contributor
-    "sevirov", # technical writer, Yandex
-    "spongedu", # Seasoned contributor
-    "ucasFL", # Amos Bird's friend
-    "vdimir", # Employee
-    "vzakaznikov",
-    "YiuRULE",
-    "zlobober", # Developer of YT
-    "ilejn", # Arenadata, responsible for Kerberized Kafka
-    "thomoco", # ClickHouse
-    "BoloniniD", # Seasoned contributor, HSE
-]}
+TRUSTED_CONTRIBUTORS = {
+    e.lower()
+    for e in [
+        "achimbab",
+        "adevyatova ",  # DOCSUP
+        "Algunenano",  # Raúl Marín, Tinybird
+        "AnaUvarova",  # DOCSUP
+        "anauvarova",  # technical writer, Yandex
+        "annvsh",  # technical writer, Yandex
+        "atereh",  # DOCSUP
+        "azat",
+        "bharatnc",  # Newbie, but already with many contributions.
+        "bobrik",  # Seasoned contributor, CloundFlare
+        "BohuTANG",
+        "codyrobert",  # Flickerbox engineer
+        "cwurm",  # Employee
+        "damozhaeva",  # DOCSUP
+        "den-crane",
+        "flickerbox-tom",  # Flickerbox
+        "gyuton",  # technical writer, Yandex
+        "hagen1778",  # Roman Khavronenko, seasoned contributor
+        "hczhcz",
+        "hexiaoting",  # Seasoned contributor
+        "ildus",  # adjust, ex-pgpro
+        "javisantana",  # a Spanish ClickHouse enthusiast, ex-Carto
+        "ka1bi4",  # DOCSUP
+        "kirillikoff",  # DOCSUP
+        "kitaisreal",  # Seasoned contributor
+        "kreuzerkrieg",
+        "lehasm",  # DOCSUP
+        "michon470",  # DOCSUP
+        "MyroTk",  # Tester in Altinity
+        "myrrc",  # Michael Kot, Altinity
+        "nikvas0",
+        "nvartolomei",
+        "olgarev",  # DOCSUP
+        "otrazhenia",  # Yandex docs contractor
+        "pdv-ru",  # DOCSUP
+        "podshumok",  # cmake expert from QRator Labs
+        "s-mx",  # Maxim Sabyanin, former employee, present contributor
+        "sevirov",  # technical writer, Yandex
+        "spongedu",  # Seasoned contributor
+        "ucasFL",  # Amos Bird's friend
+        "vdimir",  # Employee
+        "vzakaznikov",
+        "YiuRULE",
+        "zlobober",  # Developer of YT
+        "ilejn",  # Arenadata, responsible for Kerberized Kafka
+        "thomoco",  # ClickHouse
+        "BoloniniD",  # Seasoned contributor, HSE
+    ]
+}


 def pr_is_by_trusted_user(pr_user_login, pr_user_orgs):
@@ -82,33 +86,123 @@ def pr_is_by_trusted_user(pr_user_login, pr_user_orgs):

     for org_id in pr_user_orgs:
         if org_id in TRUSTED_ORG_IDS:
-            logging.info("Org '%s' is trusted; will mark user %s as trusted", org_id, pr_user_login)
+            logging.info(
+                "Org '%s' is trusted; will mark user %s as trusted",
+                org_id,
+                pr_user_login,
+            )
             return True
         logging.info("Org '%s' is not trusted", org_id)

     return False


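For readers who want to try the trust check outside the CI script, here is a self-contained rendering of the loop above (the helper name, the login, and the org list are made up; the real pr_is_by_trusted_user also has code not shown in this hunk, such as the individual-contributor check).

    import logging

    logging.basicConfig(level=logging.INFO)

    TRUSTED_ORG_IDS = {7409213, 28471076, 54801242}  # copied from the diff above

    def is_by_trusted_org(pr_user_login, pr_user_orgs):
        # Same idea as the loop in pr_is_by_trusted_user: trust the user as soon
        # as one of their org IDs is in the trusted set; untrusted orgs are only
        # logged.
        for org_id in pr_user_orgs:
            if org_id in TRUSTED_ORG_IDS:
                logging.info("Org '%s' is trusted; will mark user %s as trusted", org_id, pr_user_login)
                return True
            logging.info("Org '%s' is not trusted", org_id)
        return False

    print(is_by_trusted_org("some-login", [99999, 54801242]))  # True: 54801242 is trusted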
 # Returns whether we should look into individual checks for this PR. If not, it
 # can be skipped entirely.
 def should_run_checks_for_pr(pr_info):
     # Consider the labels and whether the user is trusted.
     print("Got labels", pr_info.labels)
-    force_labels = set(['force tests']).intersection(pr_info.labels)
+    force_labels = set(["force tests"]).intersection(pr_info.labels)
     if force_labels:
-        return True, "Labeled '{}'".format(', '.join(force_labels))
+        return True, "Labeled '{}'".format(", ".join(force_labels))

-    if 'do not test' in pr_info.labels:
+    if "do not test" in pr_info.labels:
         return False, "Labeled 'do not test'"

-    if 'can be tested' not in pr_info.labels and not pr_is_by_trusted_user(pr_info.user_login, pr_info.user_orgs):
+    if "can be tested" not in pr_info.labels and not pr_is_by_trusted_user(
+        pr_info.user_login, pr_info.user_orgs
+    ):
         return False, "Needs 'can be tested' label"

-    if 'release' in pr_info.labels or 'pr-backport' in pr_info.labels or 'pr-cherrypick' in pr_info.labels:
+    if (
+        "release" in pr_info.labels
+        or "pr-backport" in pr_info.labels
+        or "pr-cherrypick" in pr_info.labels
+    ):
         return False, "Don't try new checks for release/backports/cherry-picks"

     return True, "No special conditions apply"


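A minimal sketch of how the gating above behaves, assuming it runs in the same module as should_run_checks_for_pr; the stand-in object and its values are hypothetical, with only the attributes the function reads filled in.

    from types import SimpleNamespace

    # Hypothetical stand-in for PRInfo.
    stub = SimpleNamespace(
        labels={"do not test"},
        user_login="some-login",  # made-up login
        user_orgs=[],
    )

    # With the 'do not test' label present, the function is expected to return
    # (False, "Labeled 'do not test'"), so the workflow would skip the checks.
    print(should_run_checks_for_pr(stub))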
+def check_pr_description(pr_info):
+    description = pr_info.body
+
+    lines = [
+        line
+        for line in map(
+            lambda x: x.strip(), description.split("\n") if description else []
+        )
+    ]
+    lines = [re.sub(r"\s+", " ", l) for l in lines]
+
+    category = ""
+    entry = ""
+
+    i = 0
+    while i < len(lines):
+        if re.match(r"(?i)^[>*_ ]*change\s*log\s*category", lines[i]):
+            i += 1
+            if i >= len(lines):
+                break
+            # Can have one empty line between header and the category
+            # itself. Filter it out.
+            if not lines[i]:
+                i += 1
+                if i >= len(lines):
+                    break
+            category = re.sub(r"^[-*\s]*", "", lines[i])
+            i += 1
+
+            # Should not have more than one category. Require empty line
+            # after the first found category.
+            if i >= len(lines):
+                break
+            if lines[i]:
+                second_category = re.sub(r"^[-*\s]*", "", lines[i])
+                result_status = (
+                    "More than one changelog category specified: '"
+                    + category
+                    + "', '"
+                    + second_category
+                    + "'"
+                )
+                return result_status[:140]
+
+        elif re.match(
+            r"(?i)^[>*_ ]*(short\s*description|change\s*log\s*entry)", lines[i]
+        ):
+            i += 1
+            # Can have one empty line between header and the entry itself.
+            # Filter it out.
+            if i < len(lines) and not lines[i]:
+                i += 1
+            # All following lines until empty one are the changelog entry.
+            entry_lines = []
+            while i < len(lines) and lines[i]:
+                entry_lines.append(lines[i])
+                i += 1
+            entry = " ".join(entry_lines)
+            # Don't accept changelog entries like '...'.
+            entry = re.sub(r"[#>*_.\- ]", "", entry)
+        else:
+            i += 1
+
+    if not category:
+        return "Changelog category is empty"
+
+    # Filter out the PR categories that are not for changelog.
+    if re.match(
+        r"(?i)doc|((non|in|not|un)[-\s]*significant)|(not[ ]*for[ ]*changelog)",
+        category,
+    ):
+        return ""
+
+    if not entry:
+        return "Changelog entry required for category '{}'".format(category)
+
+    return ""
+
+
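To make the parsing rules above concrete, a hedged example of running the new check against a made-up PR body written in the general shape the pull-request template asks for; it assumes the snippet runs in the same module as check_pr_description.

    from types import SimpleNamespace

    sample_body = """
    Changelog category (leave one):
    - Bug Fix

    Changelog entry (a user-readable short description of the changes that goes to CHANGELOG.md):
    Fix a rare crash in the PR description checker.
    """

    # Expected output: '' (empty string), i.e. the description passes the check,
    # because exactly one category and a non-empty entry were found.
    print(repr(check_pr_description(SimpleNamespace(body=sample_body))))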
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)

@@ -116,15 +210,40 @@ if __name__ == "__main__":
     can_run, description = should_run_checks_for_pr(pr_info)
     gh = Github(get_best_robot_token())
     commit = get_commit(gh, pr_info.sha)

+    description_report = check_pr_description(pr_info)[:139]
+    if description_report:
+        print("::notice ::Cannot run, description does not match the template")
+        url = (
+            f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/"
+            "blob/master/.github/PULL_REQUEST_TEMPLATE.md?plain=1"
+        )
+        commit.create_status(
+            context=NAME,
+            description=description_report,
+            state="failure",
+            target_url=url,
+        )
+        sys.exit(1)
+
     url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/actions/runs/{GITHUB_RUN_ID}"
     if not can_run:
         print("::notice ::Cannot run")
-        commit.create_status(context=NAME, description=description, state="failure", target_url=url)
+        commit.create_status(
+            context=NAME, description=description, state="failure", target_url=url
+        )
         sys.exit(1)
     else:
-        if 'pr-documentation' in pr_info.labels or 'pr-doc-fix' in pr_info.labels:
-            commit.create_status(context=NAME, description="Skipping checks for documentation", state="success", target_url=url)
+        if "pr-documentation" in pr_info.labels or "pr-doc-fix" in pr_info.labels:
+            commit.create_status(
+                context=NAME,
+                description="Skipping checks for documentation",
+                state="success",
+                target_url=url,
+            )
             print("::notice ::Can run, but it's documentation PR, skipping")
         else:
             print("::notice ::Can run")
-            commit.create_status(context=NAME, description=description, state="pending", target_url=url)
+            commit.create_status(
+                context=NAME, description=description, state="pending", target_url=url
+            )
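One detail worth noting in the hunk above: the status descriptions are slice-truncated ([:139] and [:140]) before being sent, presumably because the GitHub commit-status API caps the description field at roughly 140 characters. A small helper in the same spirit; the limit constant is an assumption inferred from that slicing, not taken from the diff itself.

    # Assumed cap, inferred from the [:139]/[:140] slicing in the diff above.
    MAX_STATUS_DESCRIPTION = 140

    def clip_description(text: str) -> str:
        # Keep commit-status descriptions within the assumed limit so the API
        # call does not fail on an overly long message.
        if len(text) <= MAX_STATUS_DESCRIPTION:
            return text
        return text[: MAX_STATUS_DESCRIPTION - 1] + "…"

    print(clip_description("x" * 200))  # 140 characters, ending with an ellipsis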