ClickHouse/tests/ci/docs_release.py

#!/usr/bin/env python3
import logging
import subprocess
import os
import sys

from github import Github

from env_helper import TEMP_PATH, REPO_COPY, CLOUDFLARE_TOKEN
from s3_helper import S3Helper
from pr_info import PRInfo
from get_robot_token import get_best_robot_token
from ssh import SSHKey
from upload_result_helper import upload_results
from docker_pull_helper import get_image_with_version
from commit_status_helper import get_commit
from rerun_helper import RerunHelper

NAME = "Docs Release (actions)"

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    temp_path = TEMP_PATH
    repo_path = REPO_COPY

    gh = Github(get_best_robot_token())
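
    # Skip the job if this check already has a finished status on the commit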
    pr_info = PRInfo(need_changed_files=True)
    rerun_helper = RerunHelper(gh, pr_info, NAME)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)
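
    # Fetch the clickhouse/docs-release image; the release itself runs inside this container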
    docker_image = get_image_with_version(temp_path, "clickhouse/docs-release")

    test_output = os.path.join(temp_path, "docs_release_log")
    if not os.path.exists(test_output):
        os.makedirs(test_output)
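
    # Run the release in the container: forward the SSH agent and the Cloudflare
    # token, and mount the repo and the output directory as volumes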
    token = CLOUDFLARE_TOKEN
    cmd = (
        "docker run --cap-add=SYS_PTRACE --volume=$SSH_AUTH_SOCK:/ssh-agent "
        f"-e SSH_AUTH_SOCK=/ssh-agent -e CLOUDFLARE_TOKEN={token} "
        f"-e EXTRA_BUILD_ARGS='--verbose' --volume={repo_path}:/repo_path"
        f" --volume={test_output}:/output_path {docker_image}"
    )

    run_log_path = os.path.join(test_output, "runlog.log")

    with open(run_log_path, "w", encoding="utf-8") as log, SSHKey(
        "ROBOT_CLICKHOUSE_SSH_KEY"
    ):
        with subprocess.Popen(cmd, shell=True, stderr=log, stdout=log) as process:
            retcode = process.wait()
            if retcode == 0:
                logging.info("Run successfully")
                status = "success"
                description = "Released successfully"
            else:
                description = "Release failed (non zero exit code)"
                status = "failure"
                logging.info("Run failed")
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
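
    # Collect the output files and scan them for ERROR lines to build the report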
    files = os.listdir(test_output)
    lines = []
    additional_files = []
    if not files:
        logging.error("No output files after docs release")
        description = "No output files after docs release"
        status = "failure"
    else:
        for f in files:
            path = os.path.join(test_output, f)
            additional_files.append(path)
            with open(path, "r", encoding="utf-8") as check_file:
                for line in check_file:
                    if "ERROR" in line:
                        lines.append((line.split(":")[-1], "FAIL"))

        if lines:
            status = "failure"
            description = "Found errors in docs"
        elif status != "failure":
            lines.append(("No errors found", "OK"))
        else:
            lines.append(("Non zero exit code", "FAIL"))
    s3_helper = S3Helper("https://s3.amazonaws.com")

    report_url = upload_results(
        s3_helper, pr_info.number, pr_info.sha, lines, additional_files, NAME
    )
    print(f"::notice ::Report url: {report_url}")

    commit = get_commit(gh, pr_info.sha)
    commit.create_status(
        context=NAME, description=description, state=status, target_url=report_url
    )