ClickHouse/tests/ci/docs_release.py

104 lines
3.6 KiB
Python
Raw Normal View History

2021-10-29 15:01:29 +00:00
#!/usr/bin/env python3
#!/usr/bin/env python3
import logging
import subprocess
import os
import time
import json
import sys
2021-11-12 11:39:00 +00:00
2021-10-29 15:01:29 +00:00
from github import Github
2021-11-12 11:39:00 +00:00
2021-10-29 15:01:29 +00:00
from s3_helper import S3Helper
from pr_info import PRInfo
2021-10-30 12:34:30 +00:00
from get_robot_token import get_best_robot_token
2021-10-29 18:42:42 +00:00
from ssh import SSHKey
2021-11-12 11:39:00 +00:00
from upload_result_helper import upload_results
2021-11-12 12:13:13 +00:00
from docker_pull_helper import get_chaned_images
2021-10-29 15:01:29 +00:00
NAME = "Docs Release (actions)"


def get_commit(gh, commit_sha):
    """Return the PyGithub commit object for *commit_sha*.

    The repository is resolved from the GITHUB_REPOSITORY environment
    variable, defaulting to ClickHouse/ClickHouse when it is unset.
    """
    repo_name = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse")
    return gh.get_repo(repo_name).get_commit(commit_sha)
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    # Paths supplied by the CI workflow environment.
    # (os.path.join with a single argument was a no-op; read the vars directly.)
    temp_path = os.getenv("TEMP_PATH")
    repo_path = os.getenv("REPO_COPY")

    # The workflow event payload describes the PR being processed.
    with open(os.getenv('GITHUB_EVENT_PATH'), 'r', encoding='utf-8') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event, need_changed_files=True)

    gh = Github(get_best_robot_token())
    # Skip the whole release when the PR does not touch documentation,
    # but still report a green status so the check is not left pending.
    if not pr_info.has_changes_in_documentation():
        logging.info("No changes in documentation")
        commit = get_commit(gh, pr_info.sha)
        commit.create_status(context=NAME, description="No changes in docs", state="success")
        sys.exit(0)

    logging.info("Has changes in docs")

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    # NOTE(review): get_chaned_images (sic) looks like it may return a list of
    # images; it is interpolated below as a single image reference — confirm
    # against docker_pull_helper.
    docker_image = get_chaned_images(temp_path, ['clickhouse/docs-release'])

    test_output = os.path.join(temp_path, 'docs_release_log')
    if not os.path.exists(test_output):
        os.makedirs(test_output)

    token = os.getenv('CLOUDFLARE_TOKEN')
    # The release runs inside a container; the host ssh-agent socket is mounted
    # so the container can push with the robot's SSH key loaded below.
    cmd = "docker run --cap-add=SYS_PTRACE --volume=$SSH_AUTH_SOCK:/ssh-agent -e SSH_AUTH_SOCK=/ssh-agent " \
        f"-e CLOUDFLARE_TOKEN={token} --volume={repo_path}:/repo_path --volume={test_output}:/output_path {docker_image}"

    run_log_path = os.path.join(test_output, 'runlog.log')

    # SSHKey loads the robot deploy key into the agent for the docker push.
    with open(run_log_path, 'w', encoding='utf-8') as log, SSHKey("ROBOT_CLICKHOUSE_SSH_KEY"):
        with subprocess.Popen(cmd, shell=True, stderr=log, stdout=log) as process:
            retcode = process.wait()
            if retcode == 0:
                logging.info("Run successfully")
                status = "success"
                description = "Released successfully"
            else:
                description = "Release failed (non zero exit code)"
                status = "failure"
                logging.info("Run failed")

    # Files created inside the container are root-owned; reclaim them so the
    # runner user can read/upload them.
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)

    # Scan the container output for error lines and build the report rows.
    files = os.listdir(test_output)
    lines = []
    additional_files = []
    if not files:
        logging.error("No output files after docs release")
        description = "No output files after docs release"
        status = "failure"
    else:
        for f in files:
            path = os.path.join(test_output, f)
            additional_files.append(path)
            with open(path, 'r', encoding='utf-8') as check_file:
                for line in check_file:
                    if "ERROR" in line:
                        lines.append((line.split(':')[-1], "FAIL"))
        if lines:
            status = "failure"
            description = "Found errors in docs"
        elif status != "failure":
            lines.append(("No errors found", "OK"))
        else:
            lines.append(("Non zero exit code", "FAIL"))

    s3_helper = S3Helper('https://s3.amazonaws.com')
    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha, lines, additional_files, NAME)
    # Fixed: the message previously lacked the f-prefix and printed the
    # literal text "{report_url}" instead of the URL.
    print(f"::notice ::Report url: {report_url}")
    commit = get_commit(gh, pr_info.sha)
    commit.create_status(context=NAME, description=description, state=status, target_url=report_url)