Merge branch 'master' into chesema-dedup-matview

Sema Checherinda 2024-05-28 18:57:19 +02:00
commit 78579af682
6 changed files with 53 additions and 50 deletions

View File

@@ -11,6 +11,7 @@
 - Backward Incompatible Change
 - Build/Testing/Packaging Improvement
 - Documentation (changelog entry is not required)
+- Critical Bug Fix (crash, LOGICAL_ERROR, data loss, RBAC)
 - Bug Fix (user-visible misbehavior in an official stable release)
 - CI Fix or Improvement (changelog entry is not required)
 - Not for changelog (changelog entry is not required)

View File

@@ -9,7 +9,7 @@ from threading import Thread
 from typing import Any, Dict, List, Optional

 import requests
-from lambda_shared.pr import Labels, check_pr_description
+from lambda_shared.pr import Labels
 from lambda_shared.token import get_cached_access_token

 NEED_RERUN_OR_CANCELL_WORKFLOWS = {
@@ -321,21 +321,21 @@ def main(event):
         return
     if action == "edited":
-        print("PR is edited, check if the body is correct")
-        error, _ = check_pr_description(
-            pull_request["body"], pull_request["base"]["repo"]["full_name"]
-        )
-        if error:
-            print(
-                f"The PR's body is wrong, is going to comment it. The error is: {error}"
-            )
-            post_json = {
-                "body": "This is an automatic comment. The PR descriptions does not "
-                f"match the [template]({pull_request['base']['repo']['html_url']}/"
-                "blob/master/.github/PULL_REQUEST_TEMPLATE.md?plain=1).\n\n"
-                f"Please, edit it accordingly.\n\nThe error is: {error}"
-            }
-            _exec_post_with_retry(pull_request["comments_url"], token, json=post_json)
+        print("PR is edited - do nothing")
+        # error, _ = check_pr_description(
+        #     pull_request["body"], pull_request["base"]["repo"]["full_name"]
+        # )
+        # if error:
+        #     print(
+        #         f"The PR's body is wrong, is going to comment it. The error is: {error}"
+        #     )
+        #     post_json = {
+        #         "body": "This is an automatic comment. The PR descriptions does not "
+        #         f"match the [template]({pull_request['base']['repo']['html_url']}/"
+        #         "blob/master/.github/PULL_REQUEST_TEMPLATE.md?plain=1).\n\n"
+        #         f"Please, edit it accordingly.\n\nThe error is: {error}"
+        #     }
+        #     _exec_post_with_retry(pull_request["comments_url"], token, json=post_json)
         return
     if action == "synchronize":

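The change above drops the check_pr_description import and comments out the body validation for the "edited" webhook action; the handler now only logs and returns. A minimal sketch of the contract the old call site relied on; the return shape is inferred from that call site, and the wrapper is illustrative, assuming check_pr_description is still exported by lambda_shared.pr:

# Sketch only: per the old call site, check_pr_description(body, repo_full_name) returns a
# tuple whose first element is an error message, empty when the body matches the template.
from lambda_shared.pr import check_pr_description  # assumed still present in the package

def pr_body_matches_template(body: str, repo_full_name: str) -> bool:
    error, _ = check_pr_description(body, repo_full_name)
    return not error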
View File

@@ -245,6 +245,10 @@ close it.
         )
         self.cherrypick_pr.add_to_labels(Labels.PR_CHERRYPICK)
         self.cherrypick_pr.add_to_labels(Labels.DO_NOT_TEST)
+        if Labels.PR_CRITICAL_BUGFIX in [label.name for label in self.pr.labels]:
+            self.cherrypick_pr.add_to_labels(Labels.PR_CRITICAL_BUGFIX)
+        elif Labels.PR_BUGFIX in [label.name for label in self.pr.labels]:
+            self.cherrypick_pr.add_to_labels(Labels.PR_BUGFIX)
         self._assign_new_pr(self.cherrypick_pr)
         # update cherrypick PR to get the state for PR.mergable
         self.cherrypick_pr.update()
@@ -280,6 +284,10 @@ close it.
             head=self.backport_branch,
         )
         self.backport_pr.add_to_labels(Labels.PR_BACKPORT)
+        if Labels.PR_CRITICAL_BUGFIX in [label.name for label in self.pr.labels]:
+            self.backport_pr.add_to_labels(Labels.PR_CRITICAL_BUGFIX)
+        elif Labels.PR_BUGFIX in [label.name for label in self.pr.labels]:
+            self.backport_pr.add_to_labels(Labels.PR_BUGFIX)
         self._assign_new_pr(self.backport_pr)

     def ping_cherry_pick_assignees(self, dry_run: bool) -> None:

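The four lines added in each hunk copy the bug-fix label from the source PR onto the new cherry-pick and backport PRs, preferring pr-critical-bugfix over pr-bugfix. A small self-contained sketch of that rule, with plain strings standing in for Labels.PR_CRITICAL_BUGFIX / Labels.PR_BUGFIX and for [label.name for label in self.pr.labels]:

# Illustrative helper, not part of cherry_pick.py: pick at most one bug-fix label to forward.
def bugfix_label_to_forward(source_label_names):
    if "pr-critical-bugfix" in source_label_names:
        return "pr-critical-bugfix"
    if "pr-bugfix" in source_label_names:
        return "pr-bugfix"
    return None

assert bugfix_label_to_forward(["pr-critical-bugfix", "pr-bugfix"]) == "pr-critical-bugfix"
assert bugfix_label_to_forward(["pr-bugfix", "pr-must-backport"]) == "pr-bugfix"
assert bugfix_label_to_forward(["pr-feature"]) is None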
View File

@@ -50,6 +50,8 @@ TRUSTED_CONTRIBUTORS = {
 class Labels:
+    PR_BUGFIX = "pr-bugfix"
+    PR_CRITICAL_BUGFIX = "pr-critical-bugfix"
     CAN_BE_TESTED = "can be tested"
     DO_NOT_TEST = "do not test"
     MUST_BACKPORT = "pr-must-backport"
@@ -68,8 +70,8 @@ class Labels:
     RELEASE_LTS = "release-lts"
     SUBMODULE_CHANGED = "submodule changed"

-    # pr-bugfix autoport can lead to issues in releases, let's do ci fixes only
-    AUTO_BACKPORT = {"pr-ci"}
+    # automatic backport for critical bug fixes
+    AUTO_BACKPORT = {"pr-critical-bugfix"}


 # Descriptions are used in .github/PULL_REQUEST_TEMPLATE.md, keep comments there
@@ -84,6 +86,7 @@ LABEL_CATEGORIES = {
         "Bug Fix (user-visible misbehaviour in official stable or prestable release)",
         "Bug Fix (user-visible misbehavior in official stable or prestable release)",
     ],
+    "pr-critical-bugfix": ["Critical Bug Fix (crash, LOGICAL_ERROR, data loss, RBAC)"],
     "pr-build": [
         "Build/Testing/Packaging Improvement",
         "Build Improvement",

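Taken together, the hunks above add the pr-bugfix / pr-critical-bugfix label constants, switch AUTO_BACKPORT to {"pr-critical-bugfix"}, and register the new changelog category in LABEL_CATEGORIES. A sketch of how such a mapping is typically consumed; the dictionary below copies only two entries from the hunk, and the lookup helper is illustrative rather than the actual CI code:

# Partial copy of the mapping shown above, plus an illustrative lookup helper.
LABEL_CATEGORIES = {
    "pr-bugfix": [
        "Bug Fix (user-visible misbehavior in official stable or prestable release)",
    ],
    "pr-critical-bugfix": ["Critical Bug Fix (crash, LOGICAL_ERROR, data loss, RBAC)"],
}
AUTO_BACKPORT = {"pr-critical-bugfix"}

def label_for_category(category: str) -> str:
    # return the label whose description list contains the changelog category line
    for label, descriptions in LABEL_CATEGORIES.items():
        if category in descriptions:
            return label
    return ""

label = label_for_category("Critical Bug Fix (crash, LOGICAL_ERROR, data loss, RBAC)")
assert label == "pr-critical-bugfix" and label in AUTO_BACKPORT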
View File

@@ -401,40 +401,30 @@ class BuildResult:
     @classmethod
     def load_any(cls, build_name: str, pr_number: int, head_ref: str):  # type: ignore
         """
-        loads build report from one of all available report files (matching the job digest)
-        with the following priority:
-        1. report for the current PR @pr_number (might happen in PR' wf with or without job reuse)
-        2. report for the current branch @head_ref (might happen in release/master' wf with or without job reuse)
-        3. report for master branch (might happen in any workflow in case of job reuse)
-        4. any other report (job reuse from another PR, if master report is not available yet)
+        loads report from suitable report file with the following priority:
+        1. report from PR with the same @pr_number
+        2. report from branch with the same @head_ref
+        3. report from the master
+        4. any other report
         """
-        pr_report = None
-        ref_report = None
-        master_report = None
-        any_report = None
+        reports = []
         for file in Path(REPORT_PATH).iterdir():
             if f"{build_name}.json" in file.name:
-                any_report = file
-                if "_master_" in file.name:
-                    master_report = file
-                elif f"_{head_ref}_" in file.name:
-                    ref_report = file
-                elif pr_number and f"_{pr_number}_" in file.name:
-                    pr_report = file
-        if not any_report:
+                reports.append(file)
+        if not reports:
             return None
-        if pr_report:
-            file_path = pr_report
-        elif ref_report:
-            file_path = ref_report
-        elif master_report:
-            file_path = master_report
-        else:
-            file_path = any_report
-        return cls.load_from_file(file_path)
+        file_path = None
+        for file in reports:
+            if pr_number and f"_{pr_number}_" in file.name:
+                file_path = file
+                break
+            if f"_{head_ref}_" in file.name:
+                file_path = file
+                break
+            if "_master_" in file.name:
+                file_path = file
+                break
+        return cls.load_from_file(file_path or reports[-1])

     @classmethod
     def load_from_file(cls, file: Union[Path, str]):  # type: ignore

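The rewritten load_any collects every report file matching the build name, then walks that list and stops at the first file whose name contains the PR number, the head ref, or the _master_ marker (checked in that order for each file), falling back to the last collected report. A small demonstration of that loop on hypothetical file names; only the substring checks are taken from the hunk:

# Illustrative only: same selection loop as the new load_any body, on made-up names.
from pathlib import Path

def pick_report(reports, pr_number, head_ref):
    # stops at the first file that matches any of the three substring checks
    file_path = None
    for file in reports:
        if pr_number and f"_{pr_number}_" in file.name:
            file_path = file
            break
        if f"_{head_ref}_" in file.name:
            file_path = file
            break
        if "_master_" in file.name:
            file_path = file
            break
    return file_path or reports[-1]

# hypothetical report file names
reports = [
    Path("build_report_12345_package_release.json"),
    Path("build_report_master_package_release.json"),
]
assert pick_report(reports, 12345, "my-branch").name == "build_report_12345_package_release.json"
assert pick_report(reports, 0, "my-branch").name == "build_report_master_package_release.json"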
View File

@@ -25,6 +25,7 @@ categories_preferred_order = (
     "New Feature",
     "Performance Improvement",
     "Improvement",
+    "Critical Bug Fix",
     "Bug Fix",
     "Build/Testing/Packaging Improvement",
     "Other",
@@ -112,7 +113,7 @@ def get_descriptions(prs: PullRequests) -> Dict[str, List[Description]]:
         in_changelog = merge_commit in SHA_IN_CHANGELOG
         if in_changelog:
             desc = generate_description(pr, repos[repo_name])
-            if desc is not None:
+            if desc:
                 if desc.category not in descriptions:
                     descriptions[desc.category] = []
                 descriptions[desc.category].append(desc)
@@ -187,7 +188,7 @@ def parse_args() -> argparse.Namespace:
 # This function mirrors the PR description checks in ClickhousePullRequestTrigger.
-# Returns False if the PR should not be mentioned changelog.
+# Returns None if the PR should not be mentioned in changelog.
 def generate_description(item: PullRequest, repo: Repository) -> Optional[Description]:
     backport_number = item.number
     if item.head.ref.startswith("backport/"):
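The changelog generator now knows the "Critical Bug Fix" category, placed just ahead of "Bug Fix" in categories_preferred_order, and the guard in get_descriptions is relaxed from an explicit is-not-None check to plain truthiness. A sketch of how a preferred-order tuple can drive sorting of collected categories; the tuple below is a partial copy of the one shown above, and the helper is illustrative rather than code from changelog.py:

# Illustrative helper: order category names by the preferred tuple, unknowns last.
categories_preferred_order = (
    "New Feature",
    "Performance Improvement",
    "Improvement",
    "Critical Bug Fix",
    "Bug Fix",
    "Build/Testing/Packaging Improvement",
    "Other",
)  # partial copy of the tuple shown in the hunk

def sort_categories(names):
    def key(name):
        if name in categories_preferred_order:
            return categories_preferred_order.index(name)
        return len(categories_preferred_order)
    return sorted(names, key=key)

assert sort_categories(["Bug Fix", "Something Else", "Critical Bug Fix"]) == [
    "Critical Bug Fix",
    "Bug Fix",
    "Something Else",
]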