ClickHouse/tests/ci/ast_fuzzer_check.py

#!/usr/bin/env python3
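"""CI entry point for the AST fuzzer check.

Finds the URL of the clickhouse binary built for this commit, runs the
clickhouse/fuzzer docker image against it, uploads the resulting logs and
report to S3, records the outcome in the CI ClickHouse database and posts a
commit status back to GitHub.

Rough invocation (the check name is only an illustrative example):

    python3 ast_fuzzer_check.py "AST fuzzer (asan)"
"""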
import logging
import subprocess
import os
import sys
from pathlib import Path

from github import Github

from build_download_helper import get_build_name_for_check, read_build_urls
from clickhouse_helper import (
    CiLogsCredentials,
    ClickHouseHelper,
    prepare_tests_results_for_clickhouse,
)
from commit_status_helper import (
    RerunHelper,
    format_description,
    get_commit,
    post_commit_status,
)
from docker_pull_helper import DockerImage, get_image_with_version
from env_helper import (
    REPORTS_PATH,
    TEMP_PATH,
)
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResult
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
from upload_result_helper import upload_results

IMAGE_NAME = "clickhouse/fuzzer"


def get_run_command(
    pr_info: PRInfo,
    build_url: str,
    workspace_path: str,
    ci_logs_args: str,
    image: DockerImage,
) -> str:
    envs = [
        f"-e PR_TO_TEST={pr_info.number}",
        f"-e SHA_TO_TEST={pr_info.sha}",
        f"-e BINARY_URL_TO_DOWNLOAD='{build_url}'",
    ]
    env_str = " ".join(envs)

    return (
        f"docker run "
        # For sysctl
        "--privileged "
        "--network=host "
        f"{ci_logs_args}"
        f"--volume={workspace_path}:/workspace "
        f"{env_str} "
        "--cap-add syslog --cap-add sys_admin --cap-add=SYS_PTRACE "
        f"{image}"
    )
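# For reference, get_run_command() above expands to roughly the following
# (the PR number, SHA, URL and workspace path are made-up placeholders):
#
#   docker run --privileged --network=host <ci-logs args> \
#       --volume=/tmp/workspace:/workspace \
#       -e PR_TO_TEST=12345 -e SHA_TO_TEST=abcdef0 \
#       -e BINARY_URL_TO_DOWNLOAD='https://.../clickhouse' \
#       --cap-add syslog --cap-add sys_admin --cap-add=SYS_PTRACE \
#       clickhouse/fuzzer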


def main():
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    temp_path = TEMP_PATH
    reports_path = REPORTS_PATH

    check_name = sys.argv[1]
    if not os.path.exists(temp_path):
        os.makedirs(temp_path)
    pr_info = PRInfo()

    gh = Github(get_best_robot_token(), per_page=100)
    commit = get_commit(gh, pr_info.sha)
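    # Skip the whole check if GitHub already reports a finished status for it.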
    rerun_helper = RerunHelper(commit, check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)
    docker_image = get_image_with_version(reports_path, IMAGE_NAME)

    build_name = get_build_name_for_check(check_name)
    print(build_name)
    urls = read_build_urls(build_name, reports_path)
    if not urls:
        raise Exception("No build URLs found")
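    # The build report lists several artifacts; pick the standalone `clickhouse`
    # binary.  The `else` branch of the for-loop fires only if no URL matched,
    # i.e. the loop finished without hitting `break`.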
    for url in urls:
        if url.endswith("/clickhouse"):
            build_url = url
            break
    else:
        raise Exception("Cannot find the clickhouse binary among build results")

    logging.info("Got build url %s", build_url)

    workspace_path = os.path.join(temp_path, "workspace")
    if not os.path.exists(workspace_path):
        os.makedirs(workspace_path)
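    # Extra docker arguments produced from the CI logs credentials (written to
    # export-logs-config.sh); run.log is scrubbed of these credentials further below.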
    ci_logs_credentials = CiLogsCredentials(Path(temp_path) / "export-logs-config.sh")
    ci_logs_args = ci_logs_credentials.get_docker_arguments(
        pr_info, stopwatch.start_time_str, check_name
    )
    run_command = get_run_command(
        pr_info,
        build_url,
        workspace_path,
        ci_logs_args,
        docker_image,
    )
    logging.info("Going to run %s", run_command)

    run_log_path = os.path.join(temp_path, "run.log")
    main_log_path = os.path.join(workspace_path, "main.log")
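    # Run the fuzzer container, teeing its output into run.log.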
    with TeePopen(run_command, run_log_path) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")
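    # Files in temp_path may have been created from inside the container; hand
    # ownership back to the CI user so they can be read and uploaded below.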
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
    ci_logs_credentials.clean_ci_logs_from_credentials(Path(run_log_path))
    check_name_lower = (
        check_name.lower().replace("(", "").replace(")", "").replace(" ", "")
    )
    s3_prefix = f"{pr_info.number}/{pr_info.sha}/fuzzer_{check_name_lower}/"
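    # For a check named e.g. "AST fuzzer (asan)" on PR 12345 (both values are just
    # examples), s3_prefix becomes "12345/<sha>/fuzzer_astfuzzerasan/".
    # Artifacts of the run to be published to S3: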
    paths = {
        "run.log": run_log_path,
        "main.log": main_log_path,
        "fuzzer.log": os.path.join(workspace_path, "fuzzer.log"),
        "report.html": os.path.join(workspace_path, "report.html"),
        "core.zst": os.path.join(workspace_path, "core.zst"),
        "dmesg.log": os.path.join(workspace_path, "dmesg.log"),
    }

    compressed_server_log_path = os.path.join(workspace_path, "server.log.zst")
    if os.path.exists(compressed_server_log_path):
        paths["server.log.zst"] = compressed_server_log_path

    # The script can fail before the invocation of `zstd`, but we are still interested in its log:
    not_compressed_server_log_path = os.path.join(workspace_path, "server.log")
    if os.path.exists(not_compressed_server_log_path):
        paths["server.log"] = not_compressed_server_log_path
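    # Upload everything we collected; a failed upload is logged and the entry is
    # blanked so it is skipped in the links below.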
    s3_helper = S3Helper()
    for f in paths:
        try:
            paths[f] = s3_helper.upload_test_report_to_s3(paths[f], s3_prefix + f)
        except Exception as ex:
            logging.info("Exception uploading file %s text %s", f, ex)
            paths[f] = ""
    # Try to get status message saved by the fuzzer
    try:
        with open(
            os.path.join(workspace_path, "status.txt"), "r", encoding="utf-8"
        ) as status_f:
            status = status_f.readline().rstrip("\n")
        with open(
            os.path.join(workspace_path, "description.txt"), "r", encoding="utf-8"
        ) as desc_f:
            description = desc_f.readline().rstrip("\n")
    except:
        status = "failure"
        description = "Task failed: $?=" + str(retcode)

    description = format_description(description)

    test_result = TestResult(description, "OK")
    if "fail" in status:
        test_result.status = "FAIL"
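    # Prefer the report.html produced by the fuzzer itself; otherwise build a
    # generic results page from the uploaded artifacts.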
    if paths["report.html"]:
        report_url = paths["report.html"]
    else:
        report_url = upload_results(
            s3_helper,
            pr_info.number,
            pr_info.sha,
            [test_result],
            [],
            check_name,
            [url for url in paths.values() if url],
        )
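    # Record the outcome in the CI ClickHouse database (table "checks").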
    ch_helper = ClickHouseHelper()
    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        [test_result],
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        report_url,
        check_name,
    )
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)
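    # Finally, publish the status and the report link back to the commit on GitHub.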
    logging.info("Result: '%s', '%s', '%s'", status, description, report_url)
    print(f"::notice ::Report url: {report_url}")
    post_commit_status(commit, status, report_url, description, check_name, pr_info)


if __name__ == "__main__":
    main()