ClickHouse/tests/ci/jepsen_check.py

#!/usr/bin/env python3

import argparse
import logging
import os
import sys
import time
from pathlib import Path
from typing import Any, List

import boto3  # type: ignore
import requests  # type: ignore
from github import Github

from build_download_helper import get_build_name_for_check
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import RerunHelper, get_commit, post_commit_status
from compress_files import compress_fast
from env_helper import REPO_COPY, TEMP_PATH, S3_BUILDS_BUCKET, S3_DOWNLOAD
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from report import TestResults, TestResult
from s3_helper import S3Helper
from ssh import SSHKey
from stopwatch import Stopwatch
from tee_popen import TeePopen
from upload_result_helper import upload_results
from version_helper import get_version_from_repo
from build_check import get_release_or_pr

JEPSEN_GROUP_NAME = "jepsen_group"

KEEPER_DESIRED_INSTANCE_COUNT = 3
SERVER_DESIRED_INSTANCE_COUNT = 4

KEEPER_IMAGE_NAME = "clickhouse/keeper-jepsen-test"
KEEPER_CHECK_NAME = "ClickHouse Keeper Jepsen"
SERVER_IMAGE_NAME = "clickhouse/server-jepsen-test"
SERVER_CHECK_NAME = "ClickHouse Server Jepsen"

SUCCESSFUL_TESTS_ANCHOR = "# Successful tests"
INTERMINATE_TESTS_ANCHOR = "# Indeterminate tests"
CRASHED_TESTS_ANCHOR = "# Crashed tests"
FAILED_TESTS_ANCHOR = "# Failed tests"
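

# The Jepsen runner's summary log lists test names under the anchor headers
# above. Walk the log, remember which section we are currently in, and emit one
# TestResult per "clickhouse"/"store/clickhouse" line with the matching status
# ("OK", "ERROR" for indeterminate/crashed, "FAIL").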
def _parse_jepsen_output(path: Path) -> TestResults:
    test_results = []  # type: TestResults
    current_type = ""
    with open(path, "r") as f:
        for line in f:
            if SUCCESSFUL_TESTS_ANCHOR in line:
                current_type = "OK"
            elif INTERMINATE_TESTS_ANCHOR in line or CRASHED_TESTS_ANCHOR in line:
                current_type = "ERROR"
            elif FAILED_TESTS_ANCHOR in line:
                current_type = "FAIL"

            if (
                line.startswith("store/clickhouse") or line.startswith("clickhouse")
            ) and current_type:
                test_results.append(TestResult(line.strip(), current_type))

    return test_results
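

# Return the IDs of instances in the autoscaling group that are both InService
# and Healthy; anything still booting or draining is ignored.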
def get_autoscaling_group_instances_ids(asg_client, group_name):
    group_description = asg_client.describe_auto_scaling_groups(
        AutoScalingGroupNames=[group_name]
    )
    our_group = group_description["AutoScalingGroups"][0]
    instance_ids = []
    for instance in our_group["Instances"]:
        if (
            instance["LifecycleState"] == "InService"
            and instance["HealthStatus"] == "Healthy"
        ):
            instance_ids.append(instance["InstanceId"])

    return instance_ids
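

# Resolve instance IDs to their private IP addresses; these are the addresses
# that end up in nodes.txt for the Jepsen control node.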
def get_instances_addresses(ec2_client, instance_ids):
    ec2_response = ec2_client.describe_instances(InstanceIds=instance_ids)
    instance_ips = []
    for instances in ec2_response["Reservations"]:
        for ip in instances["Instances"]:
            instance_ips.append(ip["PrivateIpAddress"])
    return instance_ips
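

# Scale the shared "jepsen_group" autoscaling group up to `count` instances and
# poll until they are all healthy (30 attempts x 5 s, i.e. about 2.5 minutes),
# then return their private IPs.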
def prepare_autoscaling_group_and_get_hostnames(count):
    asg_client = boto3.client("autoscaling", region_name="us-east-1")
    asg_client.set_desired_capacity(
        AutoScalingGroupName=JEPSEN_GROUP_NAME, DesiredCapacity=count
    )

    instances = get_autoscaling_group_instances_ids(asg_client, JEPSEN_GROUP_NAME)
    counter = 0
    while len(instances) < count:
        time.sleep(5)
        instances = get_autoscaling_group_instances_ids(asg_client, JEPSEN_GROUP_NAME)
        counter += 1
        if counter > 30:
            raise Exception("Cannot wait autoscaling group")

    ec2_client = boto3.client("ec2", region_name="us-east-1")
    return get_instances_addresses(ec2_client, instances)
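

# Scale the autoscaling group back down to zero once the check is done, waiting
# for the instances to actually go away.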
def clear_autoscaling_group():
    asg_client = boto3.client("autoscaling", region_name="us-east-1")
    asg_client.set_desired_capacity(
        AutoScalingGroupName=JEPSEN_GROUP_NAME, DesiredCapacity=0
    )

    instances = get_autoscaling_group_instances_ids(asg_client, JEPSEN_GROUP_NAME)
    counter = 0
    while len(instances) > 0:
        time.sleep(5)
        instances = get_autoscaling_group_instances_ids(asg_client, JEPSEN_GROUP_NAME)
        counter += 1
        if counter > 30:
            raise Exception("Cannot wait autoscaling group")
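

# Write one node address per line to TEMP_PATH/nodes.txt; the file is later
# mounted into the Jepsen container as /nodes.txt.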
def save_nodes_to_file(instances: List[Any], temp_path: Path) -> Path:
    nodes_path = temp_path / "nodes.txt"
    with open(nodes_path, "w") as f:
        f.write("\n".join(instances))
        f.flush()
    return nodes_path
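

# Build the `docker run` invocation for the Jepsen image. The host's SSH agent
# socket is mounted and forwarded so the container can reach the EC2 nodes, the
# node list is mounted as /nodes.txt, and the ClickHouse binary is passed by URL
# via CLICKHOUSE_PACKAGE instead of being baked into the image. A rendered
# command looks roughly like this (all values below are illustrative only):
#
#   docker run --network=host -v '/tmp/ssh-agent:/tmp/ssh-agent' \
#     -e SSH_AUTH_SOCK=/tmp/ssh-agent/agent.sock -e PR_TO_TEST=12345 \
#     -e SHA_TO_TEST=<sha> -v '/tmp/jepsen/nodes.txt:/nodes.txt' \
#     -v /tmp/jepsen/result_path:/test_output \
#     -e 'CLICKHOUSE_PACKAGE=https://.../clickhouse' -v '<repo>:/ch' \
#     -e 'CLICKHOUSE_REPO_PATH=/ch' -e NODES_USERNAME=ubuntu \
#     clickhouse/keeper-jepsen-test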
def get_run_command(
    ssh_auth_sock,
    ssh_sock_dir,
    pr_info,
    nodes_path,
    repo_path,
    build_url,
    result_path,
    extra_args,
    docker_image,
):
    return (
        f"docker run --network=host -v '{ssh_sock_dir}:{ssh_sock_dir}' -e SSH_AUTH_SOCK={ssh_auth_sock} "
        f"-e PR_TO_TEST={pr_info.number} -e SHA_TO_TEST={pr_info.sha} -v '{nodes_path}:/nodes.txt' -v {result_path}:/test_output "
        f"-e 'CLICKHOUSE_PACKAGE={build_url}' -v '{repo_path}:/ch' -e 'CLICKHOUSE_REPO_PATH=/ch' -e NODES_USERNAME=ubuntu {extra_args} {docker_image}"
    )
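

# Entry point: parse arguments, verify the check should run for this commit,
# bring up the EC2 fleet, wait for the ClickHouse build to appear in S3, run the
# Jepsen docker image against the SSH-reachable nodes, then parse, upload and
# report the results and scale the fleet back down.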
def main():
    logging.basicConfig(level=logging.INFO)

    parser = argparse.ArgumentParser(
        prog="Jepsen Check",
        description="Check that uses Jepsen. Both Keeper and Server can be tested.",
    )
    parser.add_argument(
        "program", help='What should be tested. Valid values "keeper", "server"'
    )
    args = parser.parse_args()

    if args.program not in ("keeper", "server"):
        logging.warning("Invalid argument '%s'", args.program)
        sys.exit(0)

    stopwatch = Stopwatch()

    temp_path = Path(TEMP_PATH)
    temp_path.mkdir(parents=True, exist_ok=True)

    pr_info = PRInfo()

    logging.info(
        "Start at PR number %s, commit sha %s labels %s",
        pr_info.number,
        pr_info.sha,
        pr_info.labels,
    )
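
    # On pull requests the check is opt-in: it runs only when the "jepsen-test"
    # label is present. Non-PR runs (pr_info.number == 0) always proceed.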
    if pr_info.number != 0 and "jepsen-test" not in pr_info.labels:
        logging.info("No 'jepsen-test' label in labels list, skipping")
        sys.exit(0)
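
    # Pick the GitHub check name for the requested program and skip the run if a
    # finished status for this check already exists on the commit (re-run case).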
    gh = Github(get_best_robot_token(), per_page=100)
    commit = get_commit(gh, pr_info.sha)

    check_name = KEEPER_CHECK_NAME if args.program == "keeper" else SERVER_CHECK_NAME

    rerun_helper = RerunHelper(commit, check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    if not os.path.exists(TEMP_PATH):
        os.makedirs(TEMP_PATH)

    result_path = temp_path / "result_path"
    result_path.mkdir(parents=True, exist_ok=True)
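
    # Bring up the EC2 fleet: 3 instances for Keeper, 4 for Server. Only the
    # first three addresses are written to nodes.txt; for the Server check the
    # extra (last) instance is handed to the image separately as KEEPER_NODE.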
    instances = prepare_autoscaling_group_and_get_hostnames(
        KEEPER_DESIRED_INSTANCE_COUNT
        if args.program == "keeper"
        else SERVER_DESIRED_INSTANCE_COUNT
    )
    nodes_path = save_nodes_to_file(
        instances[:KEEPER_DESIRED_INSTANCE_COUNT], temp_path
    )

    # always use latest
    docker_image = KEEPER_IMAGE_NAME if args.program == "keeper" else SERVER_IMAGE_NAME
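
    # The binary is fetched from the same S3 location the build jobs publish to;
    # the URL prefix is the PR number or the release, as resolved by
    # get_release_or_pr.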
    build_name = get_build_name_for_check(check_name)

    release_or_pr, _ = get_release_or_pr(pr_info, get_version_from_repo())

    # This check runs separately from the other checks because it requires an
    # exclusive run (see .github/workflows/jepsen.yml), so we cannot add an
    # explicit dependency on a build job and instead busy-loop on its result.
    # For the same reason we use the latest docker image.
    build_url = (
        f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{release_or_pr}/{pr_info.sha}/"
        f"{build_name}/clickhouse"
    )
    head = requests.head(build_url)
    counter = 0
    while head.status_code != 200:
        time.sleep(10)
        head = requests.head(build_url)
        counter += 1
        if counter >= 180:
            logging.warning("Cannot fetch build in 30 minutes, exiting")
            sys.exit(0)
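
    # For the Server check the last instance is passed as KEEPER_NODE, which the
    # server-jepsen image presumably uses as the dedicated Keeper host.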
    extra_args = ""
    if args.program == "server":
        extra_args = f"-e KEEPER_NODE={instances[-1]}"

    with SSHKey(key_value=get_parameter_from_ssm("jepsen_ssh_key") + "\n"):
        ssh_auth_sock = os.environ["SSH_AUTH_SOCK"]
        auth_sock_dir = os.path.dirname(ssh_auth_sock)
        cmd = get_run_command(
            ssh_auth_sock,
            auth_sock_dir,
            pr_info,
            nodes_path,
            REPO_COPY,
            build_url,
            result_path,
            extra_args,
            docker_image,
        )
        logging.info("Going to run jepsen: %s", cmd)

        run_log_path = temp_path / "run.log"

        with TeePopen(cmd, run_log_path) as process:
            retcode = process.wait()
            if retcode == 0:
                logging.info("Run successfully")
            else:
                logging.info("Run failed")
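
    # The container writes its output under /test_output (mounted from
    # result_path). Parse the aggregated jepsen_run_all_tests.log and archive the
    # Jepsen store directory so it can be attached to the report.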
    status = "success"
    description = "No invalid analysis found ヽ(‘ー`)ノ"
    jepsen_log_path = result_path / "jepsen_run_all_tests.log"
    additional_data = []
    try:
        test_result = _parse_jepsen_output(jepsen_log_path)
        if any(r.status == "FAIL" for r in test_result):
            status = "failure"
            description = "Found invalid analysis (ノಥ益ಥ)ノ ┻━┻"

        compress_fast(result_path / "store", result_path / "jepsen_store.tar.zst")
        additional_data.append(result_path / "jepsen_store.tar.zst")
    except Exception as ex:
        print("Exception", ex)
        status = "failure"
        description = "No Jepsen output log"
        test_result = [TestResult("No Jepsen output log", "FAIL")]

    s3_helper = S3Helper()
    report_url = upload_results(
        s3_helper,
        pr_info.number,
        pr_info.sha,
        test_result,
        [run_log_path] + additional_data,
        check_name,
    )

    print(f"::notice ::Report url: {report_url}")
    post_commit_status(
        commit, status, report_url, description, check_name, pr_info, dump_to_file=True
    )

    ch_helper = ClickHouseHelper()
    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_result,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        report_url,
        check_name,
    )
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)

    clear_autoscaling_group()
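

# Typical invocation (assuming a CI runner with AWS credentials for the Jepsen
# autoscaling group and access to the "jepsen_ssh_key" SSM parameter):
#
#   python3 jepsen_check.py keeper
#   python3 jepsen_check.py server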
if __name__ == "__main__":
    main()