#!/usr/bin/env python3

import os
import logging
import sys
import json
import subprocess
import traceback
import re

from typing import Dict

from github import Github

from commit_status_helper import get_commit, post_commit_status
from ci_config import CI_CONFIG
from docker_pull_helper import get_image_with_version
from env_helper import GITHUB_EVENT_PATH, GITHUB_RUN_URL, S3_BUILDS_BUCKET, S3_DOWNLOAD
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from rerun_helper import RerunHelper
from s3_helper import S3Helper
from tee_popen import TeePopen

IMAGE_NAME = "clickhouse/performance-comparison"
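

# Build the `docker run` invocation for the performance-comparison image: the
# workspace, the output directory and the test sources are bind-mounted into the
# container, and the commit under test is passed in via environment variables.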
def get_run_command(
    workspace,
    result_path,
    repo_tests_path,
    pr_to_test,
    sha_to_test,
    additional_env,
    image,
):
    return (
        f"docker run --privileged --volume={workspace}:/workspace "
        f"--volume={result_path}:/output "
        f"--volume={repo_tests_path}:/usr/share/clickhouse-test "
        f"--cap-add syslog --cap-add sys_admin --cap-add sys_rawio "
        f"-e PR_TO_TEST={pr_to_test} -e SHA_TO_TEST={sha_to_test} {additional_env} "
        f"{image}"
    )
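

# Context manager that mounts a tmpfs of the requested size at `path` and
# unmounts it on exit.  Kept for reference; the main flow below does not use it.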
class RamDrive:
    def __init__(self, path, size):
        self.path = path
        self.size = size

    def __enter__(self):
        if not os.path.exists(self.path):
            os.makedirs(self.path)

        subprocess.check_call(
            f"sudo mount -t tmpfs -o rw,size={self.size} tmpfs {self.path}", shell=True
        )

    def __exit__(self, exc_type, exc_val, exc_tb):
        subprocess.check_call(f"sudo umount {self.path}", shell=True)
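

# Entry point: run the performance comparison for one CI check and report the
# result back to GitHub as a commit status.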
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
    repo_path = os.getenv("REPO_COPY", os.path.abspath("../../"))
    repo_tests_path = os.path.join(repo_path, "tests")
    ramdrive_path = os.getenv("RAMDRIVE_PATH", os.path.join(temp_path, "ramdrive"))
    # currently unused, doesn't make tests more stable
    ramdrive_size = os.getenv("RAMDRIVE_SIZE", "0G")
    reports_path = os.getenv("REPORTS_PATH", "./reports")
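
    # The check name is the single command-line argument; CI_CONFIG tells us which
    # build this performance run should test.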
    check_name = sys.argv[1]
    required_build = CI_CONFIG["tests_config"][check_name]["required_build"]

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)
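
    # Load the triggering GitHub event and set up an authenticated API client.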
    with open(GITHUB_EVENT_PATH, "r", encoding="utf-8") as event_file:
        event = json.load(event_file)

    gh = Github(get_best_robot_token(), per_page=100)
    pr_info = PRInfo(event)
    commit = get_commit(gh, pr_info.sha)
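
    # Collect the environment that will be forwarded into the container as `-e` flags.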
    docker_env = ""

    docker_env += f" -e S3_URL={S3_DOWNLOAD}/{S3_BUILDS_BUCKET}"
    docker_env += f" -e BUILD_NAME={required_build}"

    if pr_info.number == 0:
        pr_link = commit.html_url
    else:
        pr_link = f"https://github.com/ClickHouse/ClickHouse/pull/{pr_info.number}"

    docker_env += (
        f' -e CHPC_ADD_REPORT_LINKS="<a href={GITHUB_RUN_URL}>'
        f'Job (actions)</a> <a href={pr_link}>Tested commit</a>"'
    )
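
    # When the check is sharded, RUN_BY_HASH_* describe this shard; the shard index
    # (e.g. " [1/4]") is appended to the check name reported to GitHub.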
    if "RUN_BY_HASH_TOTAL" in os.environ:
        run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL", "1"))
        run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM", "1"))
        docker_env += (
            f" -e CHPC_TEST_RUN_BY_HASH_TOTAL={run_by_hash_total}"
            f" -e CHPC_TEST_RUN_BY_HASH_NUM={run_by_hash_num}"
        )
        check_name_with_group = (
            check_name + f" [{run_by_hash_num + 1}/{run_by_hash_total}]"
        )
    else:
        check_name_with_group = check_name
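
    # Performance runs on aarch64 are opt-in for PRs: without the "pr-performance"
    # label the check reports success and exits early.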
    is_aarch64 = "aarch64" in os.getenv("CHECK_NAME", "Performance Comparison").lower()
    if pr_info.number != 0 and is_aarch64 and "pr-performance" not in pr_info.labels:
        status = "success"
        message = "Skipped, not labeled with 'pr-performance'"
        report_url = GITHUB_RUN_URL
        post_commit_status(
            gh, pr_info.sha, check_name_with_group, message, status, report_url
        )
        sys.exit(0)
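
    # Some checks exclude a subset of tests; the pattern is forwarded to the
    # container as CHPC_TEST_GREP_EXCLUDE.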
    test_grep_exclude_filter = CI_CONFIG["tests_config"][check_name][
        "test_grep_exclude_filter"
    ]
    if test_grep_exclude_filter:
        docker_env += f" -e CHPC_TEST_GREP_EXCLUDE={test_grep_exclude_filter}"
        logging.info(
            "Filtering out performance tests by grep -v %s", test_grep_exclude_filter
        )
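
    # If GitHub already has a finished status for this check on this commit, skip the rerun.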
    rerun_helper = RerunHelper(gh, pr_info, check_name_with_group)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)
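
    # S3-friendly prefix derived from the check name: lowercased, with spaces,
    # parentheses and commas replaced by underscores.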
    check_name_prefix = (
        check_name_with_group.lower()
        .replace(" ", "_")
        .replace("(", "_")
        .replace(")", "_")
        .replace(",", "_")
    )
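
    # Resolve the docker image to run; the exact version is presumably taken from
    # the image metadata available under reports_path.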
    docker_image = get_image_with_version(reports_path, IMAGE_NAME)

    # with RamDrive(ramdrive_path, ramdrive_size):
    result_path = ramdrive_path
    if not os.path.exists(result_path):
        os.makedirs(result_path)
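
    # Credentials for the results database come from AWS SSM.  Their names are added
    # to docker_env as bare `-e NAME` flags, so docker picks the values up from the
    # process environment (see popen_env below) rather than from the command line.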
    database_url = get_parameter_from_ssm("clickhouse-test-stat-url")
    database_username = get_parameter_from_ssm("clickhouse-test-stat-login")
    database_password = get_parameter_from_ssm("clickhouse-test-stat-password")

    env_extra = {
        "CLICKHOUSE_PERFORMANCE_COMPARISON_DATABASE_URL": f"{database_url}:9440",
        "CLICKHOUSE_PERFORMANCE_COMPARISON_DATABASE_USER": database_username,
        "CLICKHOUSE_PERFORMANCE_COMPARISON_DATABASE_USER_PASSWORD": database_password,
        "CLICKHOUSE_PERFORMANCE_COMPARISON_CHECK_NAME": check_name_with_group,
        "CLICKHOUSE_PERFORMANCE_COMPARISON_CHECK_NAME_PREFIX": check_name_prefix,
    }

    docker_env += "".join([f" -e {name}" for name in env_extra])
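
    # Note that result_path is passed for both the workspace and the output volume,
    # so everything the container produces lands in the same host directory.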
    run_command = get_run_command(
        result_path,
        result_path,
        repo_tests_path,
        pr_info.number,
        pr_info.sha,
        docker_env,
        docker_image,
    )
    logging.info("Going to run command %s", run_command)
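
    # Run the container, teeing its output to run.log; env_extra is supplied through
    # the subprocess environment.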
    run_log_path = os.path.join(temp_path, "run.log")

    popen_env = os.environ.copy()
    popen_env.update(env_extra)
    with TeePopen(run_command, run_log_path, env=popen_env) as process:
        retcode = process.wait()
        if retcode == 0:
            logging.info("Run successfully")
        else:
            logging.info("Run failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
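
    # Upload the key result files to S3.  A failed upload leaves an empty URL so the
    # report-link selection below can fall through to the next artifact.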
    paths = {
        "compare.log": os.path.join(result_path, "compare.log"),
        "output.7z": os.path.join(result_path, "output.7z"),
        "report.html": os.path.join(result_path, "report.html"),
        "all-queries.html": os.path.join(result_path, "all-queries.html"),
        "queries.rep": os.path.join(result_path, "queries.rep"),
        "all-query-metrics.tsv": os.path.join(
            result_path, "report/all-query-metrics.tsv"
        ),
        "run.log": run_log_path,
    }

    s3_prefix = f"{pr_info.number}/{pr_info.sha}/{check_name_prefix}/"
    s3_helper = S3Helper()
    uploaded = {}  # type: Dict[str, str]
    for name, path in paths.items():
        try:
            uploaded[name] = s3_helper.upload_test_report_to_s3(path, s3_prefix + name)
        except Exception:
            uploaded[name] = ""
            traceback.print_exc()

    # Upload all images and flamegraphs to S3
    try:
        s3_helper.upload_test_folder_to_s3(
            os.path.join(result_path, "images"), s3_prefix + "images"
        )
    except Exception:
        traceback.print_exc()
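
    # The report marks its outcome with HTML comments of the form
    # "<!-- status: ... -->" and "<!-- message: ... -->".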
    # Try to fetch status from the report.
    status = ""
    message = ""
    try:
        with open(
            os.path.join(result_path, "report.html"), "r", encoding="utf-8"
        ) as report_fd:
            report_text = report_fd.read()
            status_match = re.search("<!--[ ]*status:(.*)-->", report_text)
            message_match = re.search("<!--[ ]*message:(.*)-->", report_text)
        if status_match:
            status = status_match.group(1).strip()
        if message_match:
            message = message_match.group(1).strip()

        # TODO: Remove me, always green mode for the first time, unless errors
        status = "success"
        if "errors" in message.lower():
            status = "failure"
        # TODO: Remove until here
    except Exception:
        traceback.print_exc()
        status = "failure"
        message = "Failed to parse the report."
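
    # A report without an explicit status or message counts as a failure.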
    if not status:
        status = "failure"
        message = "No status in report."
    elif not message:
        status = "failure"
        message = "No message in report."

    report_url = GITHUB_RUN_URL
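
    # Prefer the most informative uploaded artifact as the report link: report.html
    # if it reached S3, otherwise output.7z, compare.log, run.log, or the GitHub run.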
    if uploaded["run.log"]:
        report_url = uploaded["run.log"]

    if uploaded["compare.log"]:
        report_url = uploaded["compare.log"]

    if uploaded["output.7z"]:
        report_url = uploaded["output.7z"]

    if uploaded["report.html"]:
        report_url = uploaded["report.html"]

    post_commit_status(
        gh, pr_info.sha, check_name_with_group, message, status, report_url
    )

    if status == "error":
        sys.exit(1)