#!/usr/bin/env python3
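"""Helpers for sending CI check and per-test results to a ClickHouse instance
over its HTTP interface, and for relabeling tests that already fail on master
as flaky."""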
from typing import List
import json
import logging
import time

import requests  # type: ignore

from get_robot_token import get_parameter_from_ssm
from pr_info import PRInfo
from report import TestResults


class InsertException(Exception):
    pass


class ClickHouseHelper:
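    """Small client for the ClickHouse HTTP interface. The URL (when not
    passed explicitly) and the credentials are fetched from SSM parameters."""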

    def __init__(self, url=None):
        if url is None:
            url = get_parameter_from_ssm("clickhouse-test-stat-url")

        self.url = url
        self.auth = {
            "X-ClickHouse-User": get_parameter_from_ssm("clickhouse-test-stat-login"),
            "X-ClickHouse-Key": get_parameter_from_ssm("clickhouse-test-stat-password"),
        }

    @staticmethod
    def _insert_json_str_info_impl(url, auth, db, table, json_str):
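        """Send json_str to ClickHouse as JSONEachRow rows, retrying up to
        five times. Network errors and 5xx responses are retried; any other
        HTTP failure raises InsertException."""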
        params = {
            "database": db,
            "query": f"INSERT INTO {table} FORMAT JSONEachRow",
            "date_time_input_format": "best_effort",
            "send_logs_level": "warning",
        }

        for i in range(5):
            try:
                response = requests.post(
                    url, params=params, data=json_str, headers=auth
                )
            except Exception as e:
                error = (
                    f"Received exception while sending data to {url} "
                    f"on attempt {i}: {e}"
                )
                logging.warning(error)
                continue

            logging.info("Response content '%s'", response.content)

            if response.ok:
                break

            error = (
                f"Cannot insert data into clickhouse on attempt {i}: "
                f"HTTP code {response.status_code}: '{response.text}'"
            )

            if response.status_code >= 500:
                # A retriable error
                time.sleep(1)
                continue

            logging.info(
                "Request headers '%s', body '%s'",
                response.request.headers,
                response.request.body,
            )

            raise InsertException(error)
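        # The for-else below runs only when all five attempts finished without
        # a break, i.e. the data was never accepted; `error` holds the message
        # from the last failed attempt.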
        else:
            raise InsertException(error)

    def _insert_json_str_info(self, db, table, json_str):
        self._insert_json_str_info_impl(self.url, self.auth, db, table, json_str)

    def insert_event_into(self, db, table, event, safe=True):
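        """Serialize event as JSON and insert it. With safe=True (the
        default) insert failures are logged and swallowed; otherwise
        InsertException propagates."""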
        event_str = json.dumps(event)
        try:
            self._insert_json_str_info(db, table, event_str)
        except InsertException as e:
            logging.error(
                "Exception happened during inserting data into clickhouse: %s", e
            )
            if not safe:
                raise

    def insert_events_into(self, db, table, events, safe=True):
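        """Insert a batch of events in a single request, comma-joining the
        serialized JSON rows. Failure handling matches insert_event_into."""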
        jsons = [json.dumps(event) for event in events]

        try:
            self._insert_json_str_info(db, table, ",".join(jsons))
        except InsertException as e:
            logging.error(
                "Exception happened during inserting data into clickhouse: %s", e
            )
            if not safe:
                raise

    def _select_and_get_json_each_row(self, db, query):
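        """Execute query and return the raw JSONEachRow response body,
        retrying up to five times with a short growing delay."""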
        params = {
            "database": db,
            "query": query,
            "default_format": "JSONEachRow",
        }
        for i in range(5):
            response = None
            try:
                response = requests.get(self.url, params=params, headers=self.auth)
                response.raise_for_status()
                return response.text
            except Exception as ex:
                logging.warning("Cannot fetch data with exception %s", str(ex))
                # `if response:` would test response.ok, which is False for any
                # failed request; compare with None to log failed responses too.
                if response is not None:
                    logging.warning("Response text %s", response.text)
                time.sleep(0.1 * i)

        raise Exception("Cannot fetch data from clickhouse")

    def select_json_each_row(self, db, query):
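        """Execute query and parse each JSONEachRow line into a dict."""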
        text = self._select_and_get_json_each_row(db, query)
        result = []
        for line in text.split("\n"):
            if line:
                result.append(json.loads(line))
        return result


def prepare_tests_results_for_clickhouse(
    pr_info: PRInfo,
    test_results: TestResults,
    check_status: str,
    check_duration: float,
    check_start_time: str,
    report_url: str,
    check_name: str,
) -> List[dict]:
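    """Build JSONEachRow-ready rows for a finished check: one summary row for
    the check as a whole, plus one row per individual test result."""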
    pull_request_url = "https://github.com/ClickHouse/ClickHouse/commits/master"
    base_ref = "master"
    head_ref = "master"
    base_repo = pr_info.repo_full_name
    head_repo = pr_info.repo_full_name
    if pr_info.number != 0:
        pull_request_url = pr_info.pr_html_url
        base_ref = pr_info.base_ref
        base_repo = pr_info.base_name
        head_ref = pr_info.head_ref
        head_repo = pr_info.head_name

    common_properties = dict(
        pull_request_number=pr_info.number,
        commit_sha=pr_info.sha,
        commit_url=pr_info.commit_html_url,
        check_name=check_name,
        check_status=check_status,
        check_duration_ms=int(float(check_duration) * 1000),
        check_start_time=check_start_time,
        report_url=report_url,
        pull_request_url=pull_request_url,
        base_ref=base_ref,
        base_repo=base_repo,
        head_ref=head_ref,
        head_repo=head_repo,
        task_url=pr_info.task_url,
    )

    # Always publish a total record for all checks. For checks with individual
    # tests, also publish a record per test.
    result = [common_properties]
    for test_result in test_results:
        current_row = common_properties.copy()
        test_name = test_result.name
        test_status = test_result.status

        test_time = test_result.time or 0
        current_row["test_duration_ms"] = int(test_time * 1000)
        current_row["test_name"] = test_name
        current_row["test_status"] = test_status
        current_row["test_context_raw"] = test_result.raw_logs or ""
        result.append(current_row)

    return result


def mark_flaky_tests(
    clickhouse_helper: ClickHouseHelper, check_name: str, test_results: TestResults
) -> None:
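    """Relabel FAIL results as FLAKY when the same test has also failed (or
    been flaky) on master within the last three days. Lookup errors are
    logged and ignored."""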
    try:
        query = f"""SELECT DISTINCT test_name
FROM checks
WHERE
    check_start_time BETWEEN now() - INTERVAL 3 DAY AND now()
    AND check_name = '{check_name}'
    AND (test_status = 'FAIL' OR test_status = 'FLAKY')
    AND pull_request_number = 0
"""

        tests_data = clickhouse_helper.select_json_each_row("default", query)
        master_failed_tests = {row["test_name"] for row in tests_data}
        logging.info("Found flaky tests: %s", ", ".join(master_failed_tests))

        for test_result in test_results:
            if (
                test_result.status == "FAIL"
                and test_result.name in master_failed_tests
            ):
                test_result.status = "FLAKY"
    except Exception as ex:
        logging.error("Exception happened during flaky tests fetch: %s", ex)
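
# A minimal usage sketch (hypothetical; the real callers are the individual CI
# check scripts). The "default" database and "checks" table names are
# assumptions inferred from the query in mark_flaky_tests; the check name,
# status, timings, and report URL below are placeholder values:
#
#     ch_helper = ClickHouseHelper()  # needs SSM access for URL/credentials
#     pr_info = PRInfo()
#     test_results = ...  # TestResults collected by the check itself
#     mark_flaky_tests(ch_helper, "Example check", test_results)
#     rows = prepare_tests_results_for_clickhouse(
#         pr_info,
#         test_results,
#         "success",
#         123.4,
#         "2023-01-01 00:00:00",
#         "https://example.com/report.html",
#         "Example check",
#     )
#     ch_helper.insert_events_into("default", "checks", rows)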