Merge pull request #48468 from ClickHouse/status-comment

Implement status comment
This commit is contained in:
Mikhail f. Shiryaev 2023-04-29 09:29:27 +02:00 committed by GitHub
commit f7048bd27a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
29 changed files with 573 additions and 221 deletions

View File

@ -9,19 +9,21 @@ from github import Github
from build_download_helper import get_build_name_for_check, read_build_urls
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import format_description, post_commit_status
from commit_status_helper import (
RerunHelper,
format_description,
get_commit,
post_commit_status,
)
from docker_pull_helper import get_image_with_version
from env_helper import (
GITHUB_REPOSITORY,
GITHUB_RUN_URL,
REPORTS_PATH,
REPO_COPY,
TEMP_PATH,
)
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResult
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
@ -41,19 +43,12 @@ def get_run_command(pr_number, sha, download_url, workspace_path, image):
)
def get_commit(gh, commit_sha):
repo = gh.get_repo(GITHUB_REPOSITORY)
commit = repo.get_commit(commit_sha)
return commit
if __name__ == "__main__":
def main():
logging.basicConfig(level=logging.INFO)
stopwatch = Stopwatch()
temp_path = TEMP_PATH
repo_path = REPO_COPY
reports_path = REPORTS_PATH
check_name = sys.argv[1]
@ -64,8 +59,9 @@ if __name__ == "__main__":
pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
rerun_helper = RerunHelper(gh, pr_info, check_name)
rerun_helper = RerunHelper(commit, check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -172,4 +168,8 @@ if __name__ == "__main__":
logging.info("Result: '%s', '%s', '%s'", status, description, report_url)
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, check_name, description, status, report_url)
post_commit_status(commit, status, report_url, description, check_name, pr_info)
if __name__ == "__main__":
main()
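The same call-site migration repeats across the scripts below: the commit is resolved once with get_commit, RerunHelper now takes that Commit instead of gh + pr_info, and post_commit_status mirrors the commit.create_status argument order with pr_info appended to trigger the status comment. A minimal sketch of the pattern, assuming the tests/ci module layout from this diff; check_name, state, description and report_url are placeholders, not values from the PR:
import sys

from github import Github

from commit_status_helper import RerunHelper, get_commit, post_commit_status
from get_robot_token import get_best_robot_token
from pr_info import PRInfo

check_name = "Example check"  # placeholder; real scripts take it from argv or a constant

pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)

# old: RerunHelper(gh, pr_info, check_name); new: it works on the resolved commit
rerun_helper = RerunHelper(commit, check_name)
if rerun_helper.is_already_finished_by_status():
    sys.exit(0)

# old: post_commit_status(gh, pr_info.sha, check_name, description, status, report_url)
# new: Commit first, then the commit.create_status order, then pr_info for the comment
state, description, report_url = "success", "Example description", "https://example.com/report"
post_commit_status(commit, state, report_url, description, check_name, pr_info)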

View File

@ -8,7 +8,7 @@ import os
from github import Github
from commit_status_helper import post_commit_status
from commit_status_helper import get_commit, post_commit_status
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, TestResult
@ -81,13 +81,14 @@ def main(args):
)
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
post_commit_status(
gh,
pr_info.sha,
check_name_with_group,
"" if is_ok else "Changed tests don't reproduce the bug",
commit,
"success" if is_ok else "error",
report_url,
"" if is_ok else "Changed tests don't reproduce the bug",
check_name_with_group,
pr_info,
)

View File

@ -9,7 +9,12 @@ import time
from typing import List, Tuple
from ci_config import CI_CONFIG, BuildConfig
from commit_status_helper import get_commit_filtered_statuses, get_commit
from commit_status_helper import (
NotSet,
get_commit_filtered_statuses,
get_commit,
post_commit_status,
)
from docker_pull_helper import get_image_with_version
from env_helper import (
GITHUB_JOB,
@ -232,10 +237,10 @@ def upload_master_static_binaries(
print(f"::notice ::Binary static URL: {url}")
def mark_failed_reports_pending(build_name: str, sha: str) -> None:
def mark_failed_reports_pending(build_name: str, pr_info: PRInfo) -> None:
try:
gh = GitHub(get_best_robot_token())
commit = get_commit(gh, sha)
commit = get_commit(gh, pr_info.sha)
statuses = get_commit_filtered_statuses(commit)
report_status = [
name
@ -248,8 +253,13 @@ def mark_failed_reports_pending(build_name: str, sha: str) -> None:
"Commit already have failed status for '%s', setting it to 'pending'",
report_status,
)
commit.create_status(
"pending", status.url, "Set to pending on rerun", report_status
post_commit_status(
commit,
"pending",
status.target_url or NotSet,
"Set to pending on rerun",
report_status,
pr_info,
)
except: # we do not care about any exception here
logging.info("Failed to get or mark the reports status as pending, continue")
@ -285,7 +295,7 @@ def main():
check_for_success_run(s3_helper, s3_path_prefix, build_name, build_config)
# If it's a later run, we need to mark the possibly failed status
mark_failed_reports_pending(build_name, pr_info.sha)
mark_failed_reports_pending(build_name, pr_info)
docker_image = get_image_with_version(IMAGES_PATH, IMAGE_NAME)
image_version = docker_image.version
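The NotSet sentinel used above is PyGithub's marker for "argument omitted"; post_commit_status forwards it to commit.create_status, so a status without a report simply carries no target URL. A hedged sketch of that pattern, reusing the names from mark_failed_reports_pending above (commit, status, report_status and pr_info are assumed to be resolved as in that function):
from commit_status_helper import NotSet, post_commit_status

# status.target_url can be None for statuses posted without a report;
# `or NotSet` keeps create_status from receiving an empty URL.
post_commit_status(
    commit,
    "pending",
    status.target_url or NotSet,  # reuse the old report URL when it exists
    "Set to pending on rerun",
    report_status,
    pr_info,
)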

View File

@ -22,11 +22,12 @@ from s3_helper import S3Helper
from get_robot_token import get_best_robot_token
from pr_info import NeedsDataType, PRInfo
from commit_status_helper import (
RerunHelper,
get_commit,
post_commit_status,
update_mergeable_check,
)
from ci_config import CI_CONFIG
from rerun_helper import RerunHelper
NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")
@ -136,10 +137,11 @@ def main():
gh = Github(get_best_robot_token(), per_page=100)
pr_info = PRInfo()
commit = get_commit(gh, pr_info.sha)
atexit.register(update_mergeable_check, gh, pr_info, build_check_name)
rerun_helper = RerunHelper(gh, pr_info, build_check_name)
rerun_helper = RerunHelper(commit, build_check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -274,12 +276,8 @@ def main():
description = f"{ok_groups}/{total_groups} artifact groups are OK {addition}"
commit = get_commit(gh, pr_info.sha)
commit.create_status(
context=build_check_name,
description=description,
state=summary_status,
target_url=url,
post_commit_status(
commit, summary_status, url, description, build_check_name, pr_info
)
if summary_status == "error":

View File

@ -1,6 +1,7 @@
#!/usr/bin/env python3
from typing import Dict, TypeVar
from dataclasses import dataclass
from typing import Callable, Dict, TypeVar
ConfValue = TypeVar("ConfValue", str, bool)
BuildConfig = Dict[str, ConfValue]
@ -399,3 +400,161 @@ REQUIRED_CHECKS = [
"Unit tests (tsan)",
"Unit tests (ubsan)",
]
@dataclass
class CheckDescription:
name: str
description: str  # the check description, will be put into the status table
match_func: Callable[[str], bool]  # the function to match the commit status context
def __hash__(self) -> int:
return hash(self.name + self.description)
CHECK_DESCRIPTIONS = [
CheckDescription(
"AST fuzzer",
"Runs randomly generated queries to catch program errors. "
"The build type is optionally given in parenthesis. "
"If it fails, ask a maintainer for help.",
lambda x: x.startswith("AST fuzzer"),
),
CheckDescription(
"Bugfix validate check",
"Checks that either a new test (functional or integration) or there "
"some changed tests that fail with the binary built on master branch",
lambda x: x == "Bugfix validate check",
),
CheckDescription(
"CI running",
"A meta-check that indicates the running CI. Normally, it's in <b>success</b> or "
"<b>pending</b> state. The failed status indicates some problems with the PR",
lambda x: x == "CI running",
),
CheckDescription(
"ClickHouse build check",
"Builds ClickHouse in various configurations for use in further steps. "
"You have to fix the builds that fail. Build logs often has enough "
"information to fix the error, but you might have to reproduce the failure "
"locally. The <b>cmake</b> options can be found in the build log, grepping for "
'<b>cmake</b>. Use these options and follow the <a href="'
'https://clickhouse.com/docs/en/development/build">general build process</a>.',
lambda x: x.startswith("ClickHouse") and x.endswith("build check"),
),
CheckDescription(
"Compatibility check",
"Checks that <b>clickhouse</b> binary runs on distributions with old libc "
"versions. If it fails, ask a maintainer for help.",
lambda x: x.startswith("Compatibility check"),
),
CheckDescription(
"Docker image for servers",
"The check to build and optionally push the mentioned image to docker hub",
lambda x: x.startswith("Docker image")
and (x.endswith("building check") or x.endswith("build and push")),
),
CheckDescription(
"Docs Check", "Builds and tests the documentation", lambda x: x == "Docs Check"
),
CheckDescription(
"Fast test",
"Normally this is the first check that is ran for a PR. It builds ClickHouse "
'and runs most of <a href="https://clickhouse.com/docs/en/development/tests'
'#functional-tests">stateless functional tests</a>, '
"omitting some. If it fails, further checks are not started until it is fixed. "
"Look at the report to see which tests fail, then reproduce the failure "
'locally as described <a href="https://clickhouse.com/docs/en/development/'
'tests#functional-test-locally">here</a>.',
lambda x: x == "Fast test",
),
CheckDescription(
"Flaky tests",
"Runs a flaky tests from master multiple times to identify if they are stable.",
lambda x: "tests flaky check" in x,
),
CheckDescription(
"Install packages",
"Checks that the built packages are installable in a clear environment",
lambda x: x.startswith("Install packages ("),
),
CheckDescription(
"Integration tests",
"The integration tests report. In parenthesis the package type is given, "
"and in square brackets are the optional part/total tests",
lambda x: x.startswith("Integration tests ("),
),
CheckDescription(
"Mergeable Check",
"Checks if all other necessary checks are successful",
lambda x: x == "Mergeable Check",
),
CheckDescription(
"Performance Comparison",
"Measure changes in query performance. The performance test report is "
'described in detail <a href="https://github.com/ClickHouse/ClickHouse/tree'
'/master/docker/test/performance-comparison#how-to-read-the-report">here</a>. '
"In square brackets are the optional part/total tests",
lambda x: x.startswith("Performance Comparison"),
),
CheckDescription(
"Push to Dockerhub",
"The check for building and pushing the CI related docker images to docker hub",
lambda x: x.startswith("Push") and "to Dockerhub" in x,
),
CheckDescription(
"Sqllogic",
"Run clickhouse on the "
'<a href="https://www.sqlite.org/sqllogictest">sqllogic</a> '
"test set against sqlite and checks that all statements are passed.",
lambda x: x.startswith("Sqllogic test"),
),
CheckDescription(
"SQLancer",
"Fuzzing tests that detect logical bugs with "
'<a href="https://github.com/sqlancer/sqlancer">SQLancer</a> tool.',
lambda x: x.startswith("SQLancer"),
),
CheckDescription(
"Stateful tests",
"Runs stateful functional tests for ClickHouse binaries built in various "
"configurations -- release, debug, with sanitizers, etc.",
lambda x: x.startswith("Stateful tests ("),
),
CheckDescription(
"Stateless tests",
"Runs stateless functional tests for ClickHouse binaries built in various "
"configurations -- release, debug, with sanitizers, etc.",
lambda x: x.startswith("Stateless tests ("),
),
CheckDescription(
"Stress test",
"Runs stateless functional tests concurrently from several clients to detect "
"concurrency-related errors.",
lambda x: x.startswith("Stress test ("),
),
CheckDescription(
"Style Check",
"Runs a set of checks to keep the code style clean. If some of tests failed, "
"see the related log from the report.",
lambda x: x == "Style Check",
),
CheckDescription(
"Unit tests",
"Runs the unit tests for different release types",
lambda x: x.startswith("Unit tests ("),
),
CheckDescription(
"Upgrade check",
"Runs stress tests on server version from last release and then tries to "
"upgrade it to the version from the PR. It checks if the new server can "
"successfully startup without any errors, crashes or sanitizer asserts.",
lambda x: x.startswith("Upgrade check ("),
),
CheckDescription(
"Falback for unknown",
"There's no description for the check yet, please add it to "
"tests/ci/ci_config.py:CHECK_DESCRIPTIONS",
lambda x: True,
),
]
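For illustration only (not part of the diff): how a raw commit-status context resolves to one of these descriptions, mirroring the grouping done later in commit_status_helper.generate_status_comment. The describe() helper below is hypothetical; only CHECK_DESCRIPTIONS and CheckDescription come from this file.
from ci_config import CHECK_DESCRIPTIONS, CheckDescription


def describe(context: str) -> CheckDescription:
    # walk the list in order; the last entry is a catch-all with `lambda x: True`
    for cd in CHECK_DESCRIPTIONS:
        if cd.match_func(context):
            if cd is CHECK_DESCRIPTIONS[-1]:
                # unknown check: keep the real context as the name, reuse the fallback text
                return CheckDescription(context, cd.description, cd.match_func)
            return cd
    return CHECK_DESCRIPTIONS[-1]  # unreachable because of the catch-all


# describe("Stateless tests (release)") -> the "Stateless tests" entry
# describe("Some brand new check")      -> a fallback entry named after the context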

View File

@ -7,7 +7,7 @@ import logging
from github import Github
from commit_status_helper import post_commit_status
from commit_status_helper import get_commit, post_commit_status
from docker_pull_helper import get_image_with_version
from env_helper import (
IMAGES_PATH,
@ -43,6 +43,7 @@ if __name__ == "__main__":
gh = Github(get_best_robot_token(), per_page=100)
pr_info = PRInfo()
commit = get_commit(gh, pr_info.sha)
if not os.path.exists(TEMP_PATH):
os.makedirs(TEMP_PATH)
@ -87,4 +88,4 @@ if __name__ == "__main__":
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, NAME, "Report built", "success", report_url)
post_commit_status(commit, "success", report_url, "Report built", NAME, pr_info)

View File

@ -3,20 +3,51 @@
import csv
import os
import time
from typing import List, Literal
from typing import Dict, List, Literal, Optional, Union
import logging
from github import Github
from github.GithubObject import _NotSetType, NotSet as NotSet # type: ignore
from github.Commit import Commit
from github.CommitStatus import CommitStatus
from github.IssueComment import IssueComment
from github.Repository import Repository
from ci_config import CI_CONFIG, REQUIRED_CHECKS
from ci_config import CI_CONFIG, REQUIRED_CHECKS, CHECK_DESCRIPTIONS, CheckDescription
from env_helper import GITHUB_REPOSITORY, GITHUB_RUN_URL
from pr_info import PRInfo, SKIP_MERGEABLE_CHECK_LABEL
from report import TestResult, TestResults
from s3_helper import S3Helper
from upload_result_helper import upload_results
RETRY = 5
CommitStatuses = List[CommitStatus]
MERGEABLE_NAME = "Mergeable Check"
GH_REPO = None # type: Optional[Repository]
CI_STATUS_NAME = "CI running"
class RerunHelper:
def __init__(self, commit: Commit, check_name: str):
self.check_name = check_name
self.commit = commit
self.statuses = get_commit_filtered_statuses(commit)
def is_already_finished_by_status(self) -> bool:
# currently, failed statuses also count as finished
for status in self.statuses:
if self.check_name in status.context and status.state in (
"success",
"failure",
):
return True
return False
def get_finished_status(self) -> Optional[CommitStatus]:
for status in self.statuses:
if self.check_name in status.context:
return status
return None
def override_status(status: str, check_name: str, invert: bool = False) -> str:
@ -34,7 +65,7 @@ def override_status(status: str, check_name: str, invert: bool = False) -> str:
def get_commit(gh: Github, commit_sha: str, retry_count: int = RETRY) -> Commit:
for i in range(retry_count):
try:
repo = gh.get_repo(GITHUB_REPOSITORY)
repo = get_repo(gh)
commit = repo.get_commit(commit_sha)
break
except Exception as ex:
@ -46,22 +77,165 @@ def get_commit(gh: Github, commit_sha: str, retry_count: int = RETRY) -> Commit:
def post_commit_status(
gh: Github, sha: str, check_name: str, description: str, state: str, report_url: str
commit: Commit,
state: str,
report_url: Union[_NotSetType, str] = NotSet,
description: Union[_NotSetType, str] = NotSet,
check_name: Union[_NotSetType, str] = NotSet,
pr_info: Optional[PRInfo] = None,
) -> None:
"""The parameters are given in the same order as for commit.create_status,
if an optional parameter `pr_info` is given, the `set_status_comment` functions
is invoked to add or update the comment with statuses overview"""
for i in range(RETRY):
try:
commit = get_commit(gh, sha, 1)
commit.create_status(
context=check_name,
description=description,
state=state,
target_url=report_url,
description=description,
context=check_name,
)
break
except Exception as ex:
if i == RETRY - 1:
raise ex
time.sleep(i)
if pr_info:
set_status_comment(commit, pr_info)
def set_status_comment(commit: Commit, pr_info: PRInfo) -> None:
"""It adds or updates the comment status to all Pull Requests but for release
one, so the method does nothing for simple pushes and pull requests with
`release`/`release-lts` labels"""
# to reduce the number of parameters, the Github client is constructed on the fly
gh = Github()
gh.__requester = commit._requester # type:ignore #pylint:disable=protected-access
repo = get_repo(gh)
statuses = sorted(get_commit_filtered_statuses(commit), key=lambda x: x.context)
if not statuses:
return
# We update the report in the generate_status_comment function, so do it on
# each run, even for release PRs and normal pushes
comment_body = generate_status_comment(pr_info, statuses)
# We post the comment only to normal and backport PRs
if pr_info.number == 0 or pr_info.labels.intersection({"release", "release-lts"}):
return
comment_service_header = comment_body.split("\n", 1)[0]
comment = None # type: Optional[IssueComment]
pr = repo.get_pull(pr_info.number)
for ic in pr.get_issue_comments():
if ic.body.startswith(comment_service_header):
comment = ic
break
if comment is None:
pr.create_issue_comment(comment_body)
return
if comment.body == comment_body:
logging.info("The status comment is already updated, no needs to change it")
return
comment.edit(comment_body)
def generate_status_comment(pr_info: PRInfo, statuses: CommitStatuses) -> str:
"""The method generates the comment body, as well it updates the CI report"""
def beauty_state(state: str) -> str:
if state == "success":
return f"🟢 {state}"
if state == "pending":
return f"🟡 {state}"
if state in ["error", "failure"]:
return f"🔴 {state}"
return state
report_url = create_ci_report(pr_info, statuses)
worst_state = get_worst_state(statuses)
if not worst_state:
# Theoretically possible, although
# the function should not be used on empty statuses
worst_state = "The commit doesn't have the statuses yet"
else:
worst_state = f"The overall status of the commit is {beauty_state(worst_state)}"
comment_body = (
f"<!-- automatic status comment for PR #{pr_info.number} "
f"from {pr_info.head_name}:{pr_info.head_ref} -->\n"
f"This is an automated comment for commit {pr_info.sha} with "
f"description of existing statuses. It's updated for the latest CI running\n"
f"The full report is available [here]({report_url})\n"
f"{worst_state}\n\n<table>"
"<thead><tr><th>Check name</th><th>Description</th><th>Status</th></tr></thead>\n"
"<tbody>"
)
# group checks by name to get the worst state for each
grouped_statuses = {} # type: Dict[CheckDescription, CommitStatuses]
for status in statuses:
cd = None
for c in CHECK_DESCRIPTIONS:
if c.match_func(status.context):
cd = c
break
if cd is None or cd == CHECK_DESCRIPTIONS[-1]:
# This is the case for either a missing description or the fallback entry
cd = CheckDescription(
status.context,
CHECK_DESCRIPTIONS[-1].description,
CHECK_DESCRIPTIONS[-1].match_func,
)
if cd in grouped_statuses:
grouped_statuses[cd].append(status)
else:
grouped_statuses[cd] = [status]
table_rows = [] # type: List[str]
for desc, gs in grouped_statuses.items():
table_rows.append(
f"<tr><td>{desc.name}</td><td>{desc.description}</td>"
f"<td>{beauty_state(get_worst_state(gs))}</td></tr>\n"
)
table_rows.sort()
comment_footer = "</table>"
return "".join([comment_body, *table_rows, comment_footer])
def get_worst_state(statuses: CommitStatuses) -> str:
worst_status = None
states = {"error": 0, "failure": 1, "pending": 2, "success": 3}
for status in statuses:
if worst_status is None:
worst_status = status
continue
if states[status.state] < states[worst_status.state]:
worst_status = status
if worst_status.state == "error":
break
if worst_status is None:
return ""
return worst_status.state
def create_ci_report(pr_info: PRInfo, statuses: CommitStatuses) -> str:
"""The function converst the statuses to TestResults and uploads the report
to S3 tests bucket. Then it returns the URL"""
test_results = [] # type: TestResults
for status in statuses:
log_urls = None
if status.target_url is not None:
log_urls = [status.target_url]
test_results.append(TestResult(status.context, status.state, log_urls=log_urls))
return upload_results(
S3Helper(), pr_info.number, pr_info.sha, test_results, [], CI_STATUS_NAME
)
def post_commit_status_to_file(
@ -90,8 +264,16 @@ def get_commit_filtered_statuses(commit: Commit) -> CommitStatuses:
return list(filtered.values())
def get_repo(gh: Github) -> Repository:
global GH_REPO
if GH_REPO is not None:
return GH_REPO
GH_REPO = gh.get_repo(GITHUB_REPOSITORY)
return GH_REPO
def remove_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
repo = gh.get_repo(GITHUB_REPOSITORY)
repo = get_repo(gh)
pull_request = repo.get_pull(pr_info.number)
for label in labels_names:
pull_request.remove_from_labels(label)
@ -99,7 +281,7 @@ def remove_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
def post_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
repo = gh.get_repo(GITHUB_REPOSITORY)
repo = get_repo(gh)
pull_request = repo.get_pull(pr_info.number)
for label in labels_names:
pull_request.add_to_labels(label)
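A rough sketch (not code from the PR) of how the pieces defined above fit together: posting a status with pr_info triggers set_status_comment, which filters and groups the commit statuses, uploads the CI report, and creates or edits a single overview comment keyed by its hidden first line; get_worst_state ranks error < failure < pending < success. The check name, description, and URL here are placeholders.
from github import Github

from commit_status_helper import (
    get_commit,
    get_commit_filtered_statuses,
    get_worst_state,
    post_commit_status,
)
from get_robot_token import get_best_robot_token
from pr_info import PRInfo

pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)

# Passing pr_info makes post_commit_status call set_status_comment() afterwards,
# so the per-PR overview comment and the uploaded CI report stay in sync.
post_commit_status(
    commit,
    "success",
    "https://example.com/report",  # placeholder report URL
    "Example check passed",
    "Example check",
    pr_info,
)

# The comment header reflects the worst state across the deduplicated statuses:
# error < failure < pending < success.
print(get_worst_state(get_commit_filtered_statuses(commit)))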

View File

@ -16,13 +16,12 @@ from clickhouse_helper import (
mark_flaky_tests,
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import post_commit_status
from commit_status_helper import RerunHelper, get_commit, post_commit_status
from docker_pull_helper import get_images_with_versions
from env_helper import TEMP_PATH, REPORTS_PATH
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, TestResult
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from upload_result_helper import upload_results
@ -150,8 +149,9 @@ def main():
pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
rerun_helper = RerunHelper(gh, pr_info, args.check_name)
rerun_helper = RerunHelper(commit, args.check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -242,7 +242,7 @@ def main():
args.check_name,
)
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, args.check_name, description, state, report_url)
post_commit_status(commit, state, report_url, description, args.check_name, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -14,7 +14,7 @@ from typing import Any, Dict, List, Optional, Set, Tuple, Union
from github import Github
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import format_description, post_commit_status
from commit_status_helper import format_description, get_commit, post_commit_status
from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP, GITHUB_RUN_URL
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
@ -474,7 +474,8 @@ def main():
return
gh = Github(get_best_robot_token(), per_page=100)
post_commit_status(gh, pr_info.sha, NAME, description, status, url)
commit = get_commit(gh, pr_info.sha)
post_commit_status(commit, status, url, description, NAME, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -10,7 +10,7 @@ from typing import List, Dict, Tuple
from github import Github
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import format_description, post_commit_status
from commit_status_helper import format_description, get_commit, post_commit_status
from env_helper import RUNNER_TEMP
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
@ -221,7 +221,8 @@ def main():
description = format_description(description)
gh = Github(get_best_robot_token(), per_page=100)
post_commit_status(gh, pr_info.sha, NAME, description, status, url)
commit = get_commit(gh, pr_info.sha)
post_commit_status(commit, status, url, description, NAME, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -15,7 +15,7 @@ from github import Github
from build_check import get_release_or_pr
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import format_description, post_commit_status
from commit_status_helper import format_description, get_commit, post_commit_status
from docker_images_check import DockerImage
from env_helper import CI, GITHUB_RUN_URL, RUNNER_TEMP, S3_BUILDS_BUCKET, S3_DOWNLOAD
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
@ -372,7 +372,8 @@ def main():
description = format_description(description)
gh = Github(get_best_robot_token(), per_page=100)
post_commit_status(gh, pr_info.sha, NAME, description, status, url)
commit = get_commit(gh, pr_info.sha)
post_commit_status(commit, status, url, description, NAME, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -9,13 +9,18 @@ import sys
from github import Github
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import post_commit_status, get_commit, update_mergeable_check
from commit_status_helper import (
NotSet,
RerunHelper,
get_commit,
post_commit_status,
update_mergeable_check,
)
from docker_pull_helper import get_image_with_version
from env_helper import TEMP_PATH, REPO_COPY
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, TestResult
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -52,8 +57,9 @@ def main():
pr_info = PRInfo(need_changed_files=True)
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
rerun_helper = RerunHelper(gh, pr_info, NAME)
rerun_helper = RerunHelper(commit, NAME)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -61,9 +67,8 @@ def main():
if not pr_info.has_changes_in_documentation() and not args.force:
logging.info("No changes in documentation")
commit = get_commit(gh, pr_info.sha)
commit.create_status(
context=NAME, description="No changes in docs", state="success"
post_commit_status(
commit, "success", NotSet, "No changes in docs", NAME, pr_info
)
sys.exit(0)
@ -132,7 +137,7 @@ def main():
s3_helper, pr_info.number, pr_info.sha, test_results, additional_files, NAME
)
print("::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, NAME, description, status, report_url)
post_commit_status(commit, status, report_url, description, NAME, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -17,6 +17,8 @@ from clickhouse_helper import (
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import (
RerunHelper,
get_commit,
post_commit_status,
update_mergeable_check,
)
@ -25,7 +27,6 @@ from env_helper import S3_BUILDS_BUCKET, TEMP_PATH
from get_robot_token import get_best_robot_token
from pr_info import FORCE_TESTS_LABEL, PRInfo
from report import TestResults, read_test_results
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -106,10 +107,11 @@ def main():
pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
atexit.register(update_mergeable_check, gh, pr_info, NAME)
rerun_helper = RerunHelper(gh, pr_info, NAME)
rerun_helper = RerunHelper(commit, NAME)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
status = rerun_helper.get_finished_status()
@ -197,7 +199,7 @@ def main():
NAME,
)
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, NAME, description, state, report_url)
post_commit_status(commit, state, report_url, description, NAME, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -2,32 +2,42 @@
import logging
from github import Github
from env_helper import GITHUB_RUN_URL
from pr_info import PRInfo
from commit_status_helper import (
CI_STATUS_NAME,
NotSet,
get_commit,
get_commit_filtered_statuses,
post_commit_status,
)
from get_robot_token import get_best_robot_token
from commit_status_helper import get_commit, get_commit_filtered_statuses
NAME = "Run Check"
from pr_info import PRInfo
if __name__ == "__main__":
def main():
logging.basicConfig(level=logging.INFO)
pr_info = PRInfo(need_orgs=True)
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
url = GITHUB_RUN_URL
statuses = get_commit_filtered_statuses(commit)
pending_status = any( # find NAME status in pending state
True
for status in statuses
if status.context == NAME and status.state == "pending"
)
if pending_status:
commit.create_status(
context=NAME,
description="All checks finished",
state="success",
target_url=url,
statuses = [
status
for status in get_commit_filtered_statuses(commit)
if status.context == CI_STATUS_NAME
]
if not statuses:
return
status = statuses[0]
if status.state == "pending":
post_commit_status(
commit,
"success",
status.target_url or NotSet,
"All checks finished",
CI_STATUS_NAME,
pr_info,
)
if __name__ == "__main__":
main()
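Taken together with run_check.py further down, the "CI running" context gets a simple lifecycle: run_check.py posts it as pending with a freshly created report URL, and this script flips it to success once everything is finished. A compressed, illustrative sketch condensing both scripts; "Example description" is a placeholder, everything else is imported from the modules shown in this diff:
from github import Github

from commit_status_helper import (
    CI_STATUS_NAME,
    NotSet,
    create_ci_report,
    get_commit,
    get_commit_filtered_statuses,
    post_commit_status,
)
from get_robot_token import get_best_robot_token
from pr_info import PRInfo

pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)

# run_check.py, at the start of the CI: a pending status pointing at an
# (initially empty) CI report
ci_report_url = create_ci_report(pr_info, [])
post_commit_status(commit, "pending", ci_report_url, "Example description", CI_STATUS_NAME, pr_info)

# finish_check.py, at the end of the CI: flip it to success, keeping its URL
ci_statuses = [
    status
    for status in get_commit_filtered_statuses(commit)
    if status.context == CI_STATUS_NAME
]
if ci_statuses and ci_statuses[0].state == "pending":
    post_commit_status(
        commit,
        "success",
        ci_statuses[0].target_url or NotSet,
        "All checks finished",
        CI_STATUS_NAME,
        pr_info,
    )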

View File

@ -20,9 +20,11 @@ from clickhouse_helper import (
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import (
post_commit_status,
NotSet,
RerunHelper,
get_commit,
override_status,
post_commit_status,
post_commit_status_to_file,
update_mergeable_check,
)
@ -32,7 +34,6 @@ from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
from get_robot_token import get_best_robot_token
from pr_info import FORCE_TESTS_LABEL, PRInfo
from report import TestResults, read_test_results
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -247,6 +248,7 @@ def main():
need_changed_files=run_changed_tests, pr_event_from_api=validate_bugfix_check
)
commit = get_commit(gh, pr_info.sha)
atexit.register(update_mergeable_check, gh, pr_info, check_name)
if not os.path.exists(temp_path):
@ -274,7 +276,7 @@ def main():
run_by_hash_total = 0
check_name_with_group = check_name
rerun_helper = RerunHelper(gh, pr_info, check_name_with_group)
rerun_helper = RerunHelper(commit, check_name_with_group)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -283,13 +285,15 @@ def main():
if run_changed_tests:
tests_to_run = get_tests_to_run(pr_info)
if not tests_to_run:
commit = get_commit(gh, pr_info.sha)
state = override_status("success", check_name, validate_bugfix_check)
if args.post_commit_status == "commit_status":
commit.create_status(
context=check_name_with_group,
description=NO_CHANGES_MSG,
state=state,
post_commit_status(
commit,
state,
NotSet,
NO_CHANGES_MSG,
check_name_with_group,
pr_info,
)
elif args.post_commit_status == "file":
post_commit_status_to_file(
@ -376,16 +380,16 @@ def main():
if args.post_commit_status == "commit_status":
if "parallelreplicas" in check_name.lower():
post_commit_status(
gh,
pr_info.sha,
check_name_with_group,
description,
commit,
"success",
report_url,
description,
check_name_with_group,
pr_info,
)
else:
post_commit_status(
gh, pr_info.sha, check_name_with_group, description, state, report_url
commit, state, report_url, description, check_name_with_group, pr_info
)
elif args.post_commit_status == "file":
if "parallelreplicas" in check_name.lower():

View File

@ -19,7 +19,9 @@ from clickhouse_helper import (
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import (
RerunHelper,
format_description,
get_commit,
post_commit_status,
update_mergeable_check,
)
@ -29,7 +31,6 @@ from env_helper import CI, TEMP_PATH as TEMP, REPORTS_PATH
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, TestResult
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -268,9 +269,10 @@ def main():
if CI:
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
atexit.register(update_mergeable_check, gh, pr_info, args.check_name)
rerun_helper = RerunHelper(gh, pr_info, args.check_name)
rerun_helper = RerunHelper(commit, args.check_name)
if rerun_helper.is_already_finished_by_status():
logging.info(
"Check is already finished according to github status, exiting"
@ -347,7 +349,7 @@ def main():
description = format_description(description)
post_commit_status(gh, pr_info.sha, args.check_name, description, state, report_url)
post_commit_status(commit, state, report_url, description, args.check_name, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -19,8 +19,10 @@ from clickhouse_helper import (
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import (
post_commit_status,
RerunHelper,
get_commit,
override_status,
post_commit_status,
post_commit_status_to_file,
)
from docker_pull_helper import get_images_with_versions
@ -29,7 +31,6 @@ from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, read_test_results
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -198,8 +199,9 @@ def main():
sys.exit(0)
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
rerun_helper = RerunHelper(gh, pr_info, check_name_with_group)
rerun_helper = RerunHelper(commit, check_name_with_group)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -284,15 +286,10 @@ def main():
print(f"::notice:: {check_name} Report url: {report_url}")
if args.post_commit_status == "commit_status":
post_commit_status(
gh, pr_info.sha, check_name_with_group, description, state, report_url
commit, state, report_url, description, check_name_with_group, pr_info
)
elif args.post_commit_status == "file":
post_commit_status_to_file(
post_commit_path,
description,
state,
report_url,
)
post_commit_status_to_file(post_commit_path, description, state, report_url)
else:
raise Exception(
f'Unknown post_commit_status option "{args.post_commit_status}"'

View File

@ -13,13 +13,12 @@ from github import Github
from build_download_helper import get_build_name_for_check
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import post_commit_status
from commit_status_helper import RerunHelper, get_commit, post_commit_status
from compress_files import compress_fast
from env_helper import REPO_COPY, TEMP_PATH, S3_BUILDS_BUCKET, S3_DOWNLOAD
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from report import TestResults, TestResult
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from ssh import SSHKey
from stopwatch import Stopwatch
@ -181,10 +180,11 @@ if __name__ == "__main__":
sys.exit(0)
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
check_name = KEEPER_CHECK_NAME if args.program == "keeper" else SERVER_CHECK_NAME
rerun_helper = RerunHelper(gh, pr_info, check_name)
rerun_helper = RerunHelper(commit, check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -293,7 +293,7 @@ if __name__ == "__main__":
)
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, check_name, description, status, report_url)
post_commit_status(commit, status, report_url, description, check_name, pr_info)
ch_helper = ClickHouseHelper()
prepared_events = prepare_tests_results_for_clickhouse(

View File

@ -4,7 +4,7 @@ import argparse
import logging
import os
from commit_status_helper import get_commit
from commit_status_helper import NotSet, get_commit, post_commit_status
from env_helper import GITHUB_JOB_URL
from get_robot_token import get_best_robot_token
from github_helper import GitHub
@ -34,6 +34,7 @@ def main():
args = parser.parse_args()
url = ""
description = "the release can be created from the commit, manually set"
pr_info = None
if not args.commit:
pr_info = PRInfo()
if pr_info.event == pr_info.default_event:
@ -45,14 +46,10 @@ def main():
gh = GitHub(args.token, create_cache_dir=False)
# Get the rate limits for a quick fail
gh.get_rate_limit()
commit = get_commit(gh, args.commit)
commit.create_status(
context=RELEASE_READY_STATUS,
description=description,
state="success",
target_url=url,
gh.get_rate_limit()
post_commit_status(
commit, "success", url or NotSet, description, RELEASE_READY_STATUS, pr_info
)

View File

@ -12,13 +12,12 @@ from typing import Dict
from github import Github
from commit_status_helper import get_commit, post_commit_status
from commit_status_helper import RerunHelper, get_commit, post_commit_status
from ci_config import CI_CONFIG
from docker_pull_helper import get_image_with_version
from env_helper import GITHUB_EVENT_PATH, GITHUB_RUN_URL, S3_BUILDS_BUCKET, S3_DOWNLOAD
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from tee_popen import TeePopen
@ -118,7 +117,7 @@ if __name__ == "__main__":
message = "Skipped, not labeled with 'pr-performance'"
report_url = GITHUB_RUN_URL
post_commit_status(
gh, pr_info.sha, check_name_with_group, message, status, report_url
commit, status, report_url, message, check_name_with_group, pr_info
)
sys.exit(0)
@ -131,7 +130,7 @@ if __name__ == "__main__":
"Fill fliter our performance tests by grep -v %s", test_grep_exclude_filter
)
rerun_helper = RerunHelper(gh, pr_info, check_name_with_group)
rerun_helper = RerunHelper(commit, check_name_with_group)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -267,7 +266,7 @@ if __name__ == "__main__":
report_url = uploaded["report.html"]
post_commit_status(
gh, pr_info.sha, check_name_with_group, message, status, report_url
commit, status, report_url, message, check_name_with_group, pr_info
)
if status == "error":

View File

@ -370,6 +370,7 @@ def create_test_html_report(
colspan += 1
if test_result.log_urls is not None:
has_log_urls = True
test_logs_html = "<br>".join(
[_get_html_url(url) for url in test_result.log_urls]
)

View File

@ -1,36 +0,0 @@
#!/usr/bin/env python3
from typing import Optional
from commit_status_helper import get_commit, get_commit_filtered_statuses
from github import Github
from github.CommitStatus import CommitStatus
from pr_info import PRInfo
# TODO: move it to commit_status_helper
class RerunHelper:
def __init__(self, gh: Github, pr_info: PRInfo, check_name: str):
self.gh = gh
self.pr_info = pr_info
self.check_name = check_name
commit = get_commit(gh, self.pr_info.sha)
if commit is None:
raise ValueError(f"unable to receive commit for {pr_info.sha}")
self.pygh_commit = commit
self.statuses = get_commit_filtered_statuses(commit)
def is_already_finished_by_status(self) -> bool:
# currently we agree even for failed statuses
for status in self.statuses:
if self.check_name in status.context and status.state in (
"success",
"failure",
):
return True
return False
def get_finished_status(self) -> Optional[CommitStatus]:
for status in self.statuses:
if self.check_name in status.context:
return status
return None

View File

@ -7,20 +7,22 @@ from typing import Tuple
from github import Github
from commit_status_helper import (
CI_STATUS_NAME,
NotSet,
create_ci_report,
format_description,
get_commit,
post_commit_status,
post_labels,
remove_labels,
set_mergeable_check,
)
from docs_check import NAME as DOCS_NAME
from env_helper import GITHUB_RUN_URL, GITHUB_REPOSITORY, GITHUB_SERVER_URL
from env_helper import GITHUB_REPOSITORY, GITHUB_SERVER_URL
from get_robot_token import get_best_robot_token
from pr_info import FORCE_TESTS_LABEL, PRInfo
from workflow_approve_rerun_lambda.app import TRUSTED_CONTRIBUTORS
NAME = "Run Check"
TRUSTED_ORG_IDS = {
54801242, # clickhouse
}
@ -89,7 +91,7 @@ def pr_is_by_trusted_user(pr_user_login, pr_user_orgs):
# Returns whether we should look into individual checks for this PR. If not, it
# can be skipped entirely.
# Returns can_run, description, labels_state
def should_run_checks_for_pr(pr_info: PRInfo) -> Tuple[bool, str, str]:
def should_run_ci_for_pr(pr_info: PRInfo) -> Tuple[bool, str, str]:
# Consider the labels and whether the user is trusted.
print("Got labels", pr_info.labels)
if FORCE_TESTS_LABEL in pr_info.labels:
@ -203,7 +205,7 @@ def check_pr_description(pr_info: PRInfo) -> Tuple[str, str]:
return description_error, category
if __name__ == "__main__":
def main():
logging.basicConfig(level=logging.INFO)
pr_info = PRInfo(need_orgs=True, pr_event_from_api=True, need_changed_files=True)
@ -213,7 +215,7 @@ if __name__ == "__main__":
print("::notice ::Cannot run, no PR exists for the commit")
sys.exit(1)
can_run, description, labels_state = should_run_checks_for_pr(pr_info)
can_run, description, labels_state = should_run_ci_for_pr(pr_info)
if can_run and OK_SKIP_LABELS.intersection(pr_info.labels):
print("::notice :: Early finish the check, running in a special PR")
sys.exit(0)
@ -253,10 +255,12 @@ if __name__ == "__main__":
if FEATURE_LABEL in pr_info.labels:
print(f"The '{FEATURE_LABEL}' in the labels, expect the 'Docs Check' status")
commit.create_status(
context=DOCS_NAME,
description=f"expect adding docs for {FEATURE_LABEL}",
state="pending",
post_commit_status( # do not pass pr_info here intentionally
commit,
"pending",
NotSet,
f"expect adding docs for {FEATURE_LABEL}",
DOCS_NAME,
)
else:
set_mergeable_check(commit, "skipped")
@ -267,7 +271,7 @@ if __name__ == "__main__":
f"{description_error}"
)
logging.info(
"PR body doesn't match the template: (start)\n%s\n(end)\n" "Reason: %s",
"PR body doesn't match the template: (start)\n%s\n(end)\nReason: %s",
pr_info.body,
description_error,
)
@ -275,23 +279,29 @@ if __name__ == "__main__":
f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/"
"blob/master/.github/PULL_REQUEST_TEMPLATE.md?plain=1"
)
commit.create_status(
context=NAME,
description=format_description(description_error),
state="failure",
target_url=url,
post_commit_status(
commit,
"failure",
url,
format_description(description_error),
CI_STATUS_NAME,
pr_info,
)
sys.exit(1)
url = GITHUB_RUN_URL
ci_report_url = create_ci_report(pr_info, [])
if not can_run:
print("::notice ::Cannot run")
commit.create_status(
context=NAME, description=description, state=labels_state, target_url=url
post_commit_status(
commit, labels_state, ci_report_url, description, CI_STATUS_NAME, pr_info
)
sys.exit(1)
else:
print("::notice ::Can run")
commit.create_status(
context=NAME, description=description, state="pending", target_url=url
post_commit_status(
commit, "pending", ci_report_url, description, CI_STATUS_NAME, pr_info
)
if __name__ == "__main__":
main()

View File

@ -40,11 +40,11 @@ def _flatten_list(lst):
class S3Helper:
def __init__(self, host=S3_URL, download_host=S3_DOWNLOAD):
def __init__(self):
self.session = boto3.session.Session(region_name="us-east-1")
self.client = self.session.client("s3", endpoint_url=host)
self.host = host
self.download_host = download_host
self.client = self.session.client("s3", endpoint_url=S3_URL)
self.host = S3_URL
self.download_host = S3_DOWNLOAD
def _upload_file_to_s3(self, bucket_name: str, file_path: str, s3_path: str) -> str:
logging.debug(

View File

@ -10,10 +10,14 @@ from github import Github
from build_download_helper import get_build_name_for_check, read_build_urls
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import format_description, post_commit_status
from commit_status_helper import (
RerunHelper,
format_description,
get_commit,
post_commit_status,
)
from docker_pull_helper import get_image_with_version
from env_helper import (
GITHUB_REPOSITORY,
GITHUB_RUN_URL,
REPORTS_PATH,
TEMP_PATH,
@ -21,7 +25,6 @@ from env_helper import (
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, TestResult
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from upload_result_helper import upload_results
@ -46,12 +49,6 @@ def get_run_command(download_url, workspace_path, image):
)
def get_commit(gh, commit_sha):
repo = gh.get_repo(GITHUB_REPOSITORY)
commit = repo.get_commit(commit_sha)
return commit
def main():
logging.basicConfig(level=logging.INFO)
@ -68,8 +65,9 @@ def main():
pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
rerun_helper = RerunHelper(gh, pr_info, check_name)
rerun_helper = RerunHelper(commit, check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -187,12 +185,10 @@ def main():
check_name,
)
post_commit_status(gh, pr_info.sha, check_name, description, status, report_url)
post_commit_status(commit, status, report_url, description, check_name, pr_info)
print(f"::notice:: {check_name} Report url: {report_url}")
ch_helper = ClickHouseHelper()
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,
test_results,
@ -202,12 +198,8 @@ def main():
report_url,
check_name,
)
ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)
print(f"::notice Result: '{status}', '{description}', '{report_url}'")
post_commit_status(gh, pr_info.sha, check_name, description, status, report_url)
if __name__ == "__main__":
main()

View File

@ -17,11 +17,15 @@ from pr_info import FORCE_TESTS_LABEL, PRInfo
from build_download_helper import download_all_deb_packages
from upload_result_helper import upload_results
from docker_pull_helper import get_image_with_version
from commit_status_helper import override_status, post_commit_status
from commit_status_helper import (
RerunHelper,
get_commit,
override_status,
post_commit_status,
)
from report import TestResults, read_test_results
from stopwatch import Stopwatch
from rerun_helper import RerunHelper
from tee_popen import TeePopen
@ -103,8 +107,9 @@ if __name__ == "__main__":
pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
rerun_helper = RerunHelper(gh, pr_info, check_name)
rerun_helper = RerunHelper(commit, check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -203,7 +208,7 @@ if __name__ == "__main__":
# Until it passes all tests, do not block CI, report "success"
assert description is not None
post_commit_status(gh, pr_info.sha, check_name, description, "success", report_url)
post_commit_status(commit, "success", report_url, description, check_name, pr_info)
if status != "success":
if FORCE_TESTS_LABEL in pr_info.labels:

View File

@ -16,13 +16,12 @@ from clickhouse_helper import (
mark_flaky_tests,
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import post_commit_status
from commit_status_helper import RerunHelper, get_commit, post_commit_status
from docker_pull_helper import get_image_with_version
from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, read_test_results
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -125,8 +124,9 @@ def run_stress_test(docker_image_name):
pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
rerun_helper = RerunHelper(gh, pr_info, check_name)
rerun_helper = RerunHelper(commit, check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -180,7 +180,7 @@ def run_stress_test(docker_image_name):
)
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, check_name, description, state, report_url)
post_commit_status(commit, state, report_url, description, check_name, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -15,7 +15,12 @@ from clickhouse_helper import (
mark_flaky_tests,
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import post_commit_status, update_mergeable_check
from commit_status_helper import (
RerunHelper,
get_commit,
post_commit_status,
update_mergeable_check,
)
from docker_pull_helper import get_image_with_version
from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP
from get_robot_token import get_best_robot_token
@ -23,7 +28,6 @@ from github_helper import GitHub
from git_helper import git_runner
from pr_info import PRInfo
from report import TestResults, read_test_results
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from ssh import SSHKey
from stopwatch import Stopwatch
@ -149,10 +153,11 @@ def main():
checkout_head(pr_info)
gh = GitHub(get_best_robot_token(), create_cache_dir=False)
commit = get_commit(gh, pr_info.sha)
atexit.register(update_mergeable_check, gh, pr_info, NAME)
rerun_helper = RerunHelper(gh, pr_info, NAME)
rerun_helper = RerunHelper(commit, NAME)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
# Finish with the same code as previous
@ -190,7 +195,7 @@ def main():
s3_helper, pr_info.number, pr_info.sha, test_results, additional_files, NAME
)
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, NAME, description, state, report_url)
post_commit_status(commit, state, report_url, description, NAME, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,

View File

@ -15,13 +15,17 @@ from clickhouse_helper import (
mark_flaky_tests,
prepare_tests_results_for_clickhouse,
)
from commit_status_helper import post_commit_status, update_mergeable_check
from commit_status_helper import (
RerunHelper,
get_commit,
post_commit_status,
update_mergeable_check,
)
from docker_pull_helper import get_image_with_version
from env_helper import TEMP_PATH, REPORTS_PATH
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, TestResult
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -116,10 +120,11 @@ def main():
pr_info = PRInfo()
gh = Github(get_best_robot_token(), per_page=100)
commit = get_commit(gh, pr_info.sha)
atexit.register(update_mergeable_check, gh, pr_info, check_name)
rerun_helper = RerunHelper(gh, pr_info, check_name)
rerun_helper = RerunHelper(commit, check_name)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
@ -165,7 +170,7 @@ def main():
check_name,
)
print(f"::notice ::Report url: {report_url}")
post_commit_status(gh, pr_info.sha, check_name, description, state, report_url)
post_commit_status(commit, state, report_url, description, check_name, pr_info)
prepared_events = prepare_tests_results_for_clickhouse(
pr_info,