2024-09-18 20:04:50 +00:00
|
|
|
import logging
|
|
|
|
import os
|
2023-01-04 15:52:32 +00:00
|
|
|
from pathlib import Path
|
2024-01-04 15:35:09 +00:00
|
|
|
from typing import Dict, List, Optional, Sequence, Union
|
2021-11-12 11:39:00 +00:00
|
|
|
|
2024-09-18 20:04:50 +00:00
|
|
|
from env_helper import GITHUB_REPOSITORY, GITHUB_RUN_URL, GITHUB_SERVER_URL
|
|
|
|
from report import GITHUB_JOB_URL, TestResults, create_test_html_report
|
2023-01-03 14:23:19 +00:00
|
|
|
from s3_helper import S3Helper
|
2021-11-12 11:39:00 +00:00
|
|
|
|
2021-11-26 14:00:09 +00:00
|
|
|
|
2021-11-12 19:57:26 +00:00
|
|
|
def process_logs(
    s3_client: S3Helper,
    additional_logs: Union[Sequence[str], Sequence[Path]],
    s3_path_prefix: str,
    test_results: TestResults,
) -> List[str]:
    """Upload log files to S3 and rewrite test results to reference the URLs.

    For every test result that carries ``log_files``, each file is uploaded
    under *s3_path_prefix* and the resulting URLs are stored in the result's
    ``log_urls`` list (mutates *test_results* in place). Every existing file
    from *additional_logs* is uploaded as well.

    :param s3_client: helper used to perform the uploads
    :param additional_logs: extra local log paths to upload (str or Path)
    :param s3_path_prefix: S3 key prefix, e.g. ``"{pr}/{sha}/{check}"``
    :param test_results: results whose per-test logs should be uploaded
    :return: S3 URLs of the uploaded *additional_logs* files
    """
    logging.info("Upload files to s3 %s", additional_logs)

    # Cache of already-uploaded files so a log shared by several tests is
    # uploaded only once. Keys are always str(path).
    processed_logs = {}  # type: Dict[str, str]
    # Firstly convert paths of logs from test_results to urls to s3.
    for test_result in test_results:
        if test_result.log_files is None:
            continue

        test_result.log_urls = []
        for path in test_result.log_files:
            # Normalize to str once: the cache is keyed by str(path), but the
            # original membership test used the raw `path` (possibly a Path),
            # so the cache never hit for Path inputs and files re-uploaded.
            key = str(path)
            if key in processed_logs:
                test_result.log_urls.append(processed_logs[key])
            elif path:
                url = s3_client.upload_test_report_to_s3(
                    Path(path), s3_path_prefix + "/" + key
                )
                test_result.log_urls.append(url)
                processed_logs[key] = url

    additional_urls = []  # type: List[str]
    for log_path in additional_logs:
        if Path(log_path).is_file():
            additional_urls.append(
                s3_client.upload_test_report_to_s3(
                    Path(log_path), s3_path_prefix + "/" + os.path.basename(log_path)
                )
            )
        else:
            # Best-effort: a missing additional log is reported, not fatal.
            logging.error("File %s is missing - skip", log_path)

    return additional_urls
|
|
|
|
|
2021-11-26 14:00:09 +00:00
|
|
|
|
2022-03-11 13:36:29 +00:00
|
|
|
def upload_results(
    s3_client: S3Helper,
    pr_number: int,
    commit_sha: str,
    branch_name: str,
    test_results: TestResults,
    additional_files: Union[Sequence[Path], Sequence[str]],
    check_name: str,
    additional_urls: Optional[List[str]] = None,
) -> str:
    """Upload all logs and the HTML report for a check run; return report URL.

    Uploads *additional_files* and per-test logs via :func:`process_logs`,
    then either generates ``report.html`` with ``create_test_html_report`` and
    uploads it, or reuses a ``report.html`` URL already present among the
    uploaded files (a test job may prepare its own report).

    :param s3_client: helper used to perform the uploads
    :param pr_number: pull request number, 0 for non-PR (branch) runs
    :param commit_sha: commit the check ran against
    :param branch_name: branch display name (replaced by "PR #N" for PRs)
    :param test_results: results rendered into the HTML report
    :param additional_files: local log files to upload; by convention the
        first one is the raw log
    :param check_name: human-readable check name (also used for the S3 prefix)
    :param additional_urls: already-uploaded URLs to include; not mutated
    :return: URL of the uploaded (or reused) HTML report
    """
    normalized_check_name = check_name.lower()
    for r in ((" ", "_"), ("(", "_"), (")", "_"), (",", "_"), ("/", "_")):
        normalized_check_name = normalized_check_name.replace(*r)

    # Preserve additional_urls to not modify the original one.
    # The original only rebound the name (`additional_urls or []`), so a
    # caller-supplied non-empty list was still mutated by extend/remove/pop
    # below; copy it to actually honor the stated contract.
    additional_urls = list(additional_urls or [])

    s3_path_prefix = f"{pr_number}/{commit_sha}/{normalized_check_name}"
    additional_urls.extend(
        process_logs(s3_client, additional_files, s3_path_prefix, test_results)
    )

    branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/{branch_name}"
    if pr_number != 0:
        branch_name = f"PR #{pr_number}"
        branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/pull/{pr_number}"
    commit_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commit/{commit_sha}"

    # A test job may have uploaded its own report.html - reuse it if found.
    ready_report_url = None
    for url in additional_urls:
        if "report.html" in url:
            ready_report_url = url
            additional_urls.remove(ready_report_url)
            break

    # Resolve the job URL once, under a guard: the original called
    # GITHUB_JOB_URL() unprotected for raw_log_url but guarded for job_url,
    # so an API failure with no additional urls crashed the upload.
    try:
        job_url = GITHUB_JOB_URL()
    except Exception:
        logging.error(
            "Failed to get job URL from GH API, job report will use run URL instead."
        )
        job_url = GITHUB_RUN_URL

    if additional_urls:
        # By convention the first uploaded additional file is the raw log.
        raw_log_url = additional_urls.pop(0)
    else:
        raw_log_url = job_url

    if test_results or not ready_report_url:
        html_report = create_test_html_report(
            check_name,
            test_results,
            raw_log_url,
            GITHUB_RUN_URL,
            job_url,
            branch_url,
            branch_name,
            commit_url,
            additional_urls,
        )
        report_path = Path("report.html")
        report_path.write_text(html_report, encoding="utf-8")
        url = s3_client.upload_test_report_to_s3(report_path, s3_path_prefix + ".html")
    else:
        logging.info("report.html was prepared by test job itself")
        url = ready_report_url

    logging.info("Search result in url %s", url)
    return url
|