Add docs release

This commit is contained in:
alesapin 2021-10-29 18:01:29 +03:00
parent 1a13a741f9
commit 4c645f3ef6
9 changed files with 263 additions and 24 deletions

View File

@ -58,7 +58,7 @@ jobs:
sudo rm -fr $TEMP_PATH
DocsCheck:
needs: DockerHubPush
runs-on: [self-hosted, style-checker]
runs-on: [self-hosted, func-tester]
steps:
- name: Download changed images
uses: actions/download-artifact@v2

47
.github/workflows/release.yml vendored Normal file
View File

@ -0,0 +1,47 @@
# Docs release workflow: rebuilds CI docker images, then publishes the docs.
# Runs on every push to master.
name: ReleaseChecks
concurrency:
  group: docs-release
  cancel-in-progress: true
on:  # yamllint disable-line rule:truthy
  push:
    branches:
      - master
jobs:
  DockerHubPush:
    # NOTE: removed `needs: CheckLabels` — no CheckLabels job exists in this
    # workflow, and an unknown `needs` target makes GitHub reject the file.
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 docker_images_check.py
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docker_images_check/changed_images.json
  DocsRelease:
    # Must wait for DockerHubPush: this job downloads the changed_images
    # artifact that DockerHubPush uploads.
    needs: DockerHubPush
    # Was `runs:` — not a valid workflow key; the job needs `runs-on`.
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docs_release
      - name: Docs Release
        env:
          TEMP_PATH: ${{ runner.temp }}/docs_release
          REPO_COPY: ${{ runner.temp }}/docs_release/ClickHouse
        run: |
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 docs_release.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          sudo rm -fr $TEMP_PATH

View File

@ -0,0 +1,9 @@
# docker build -t clickhouse/docs-release .
# Image that publishes the ClickHouse docs; extends the docs builder with
# a release entrypoint script.
FROM clickhouse/docs-builder

COPY run.sh /

# run.sh expects the repository mounted at REPO_PATH and writes logs to
# OUTPUT_PATH (both are bind-mounted by tests/ci/docs_release.py).
ENV REPO_PATH=/repo_path \
    OUTPUT_PATH=/output_path

CMD ["/bin/bash", "/run.sh"]

View File

@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Docs release entrypoint: set up a virtualenv with the docs tooling and
# run the release script, mirroring its output into OUTPUT_PATH.
set -euo pipefail

cd "$REPO_PATH/docs/tools"
mkdir venv
virtualenv -p "$(which python3)" venv
source venv/bin/activate
python3 -m pip install --ignore-installed -r requirements.txt
# BUG FIX: was `tee tee $OUTPUT_PATH/output.log` — the duplicated word made
# tee also write to a stray file literally named "tee".
./release.sh 2>&1 | tee "$OUTPUT_PATH/output.log"

View File

@ -169,11 +169,17 @@
},
"docker/docs/builder": {
"name": "clickhouse/docs-builder",
"dependent": ["docker/docs/check"]
"dependent": [
"docker/docs/check",
"docker/docs/release"
]
},
"docker/docs/check": {
"name": "clickhouse/docs-check",
"dependent": []
},
"docker/docs/release": {
"name": "clickhouse/docs-release",
"dependent": []
}
}

View File

@ -193,8 +193,9 @@ if __name__ == "__main__":
changed_images, dockerhub_repo_name = get_changed_docker_images(pr_info, repo_path, "docker/images.json")
logging.info("Has changed images %s", ', '.join([str(image[0]) for image in changed_images]))
pr_commit_version = str(pr_info.number) + '-' + pr_info.sha
versions = [str(pr_info.number), pr_commit_version]
if pr_info.number == 0:
versions.append("latest")
subprocess.check_output("docker login --username 'robotclickhouse' --password '{}'".format(dockerhub_password), shell=True)

View File

@ -111,7 +111,7 @@ if __name__ == "__main__":
retcode = process.wait()
if retcode == 0:
logging.info("Run successfully")
status = "Success"
status = "success"
description = "Run Ok"
else:
description = "Run failed (non zero exit code)"

153
tests/ci/docs_release.py Normal file
View File

@ -0,0 +1,153 @@
#!/usr/bin/env python3
import logging
import subprocess
import os
import time
import json
import sys
from github import Github
from report import create_test_html_report
from s3_helper import S3Helper
from pr_info import PRInfo
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
NAME = "Docs Release (actions)"
def process_logs(s3_client, additional_logs, s3_path_prefix):
    """Upload each non-empty log path to S3 and return the uploaded URLs.

    Falsy entries (None, empty string) in ``additional_logs`` are skipped.
    """
    return [
        s3_client.upload_test_report_to_s3(
            log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        for log_path in additional_logs
        if log_path
    ]
def upload_results(s3_client, pr_number, commit_sha, test_results, additional_files):
    """Build the HTML report for a docs release run, upload it to S3,
    and return the report URL.

    The first uploaded log becomes the "raw log" link of the report; the
    remaining uploads are attached as additional links.
    """
    s3_path_prefix = f"{pr_number}/{commit_sha}/docs_release"
    additional_urls = process_logs(s3_client, additional_files, s3_path_prefix)

    if pr_number != 0:
        branch_name = f"PR #{pr_number}"
        branch_url = f"https://github.com/ClickHouse/ClickHouse/pull/{pr_number}"
    else:
        branch_name = "master"
        branch_url = "https://github.com/ClickHouse/ClickHouse/commits/master"
    commit_url = f"https://github.com/ClickHouse/ClickHouse/commit/{commit_sha}"
    task_url = f"https://github.com/ClickHouse/ClickHouse/actions/runs/{os.getenv('GITHUB_RUN_ID')}"

    raw_log_url = additional_urls.pop(0)

    html_report = create_test_html_report(
        NAME, test_results, raw_log_url, task_url,
        branch_url, branch_name, commit_url, additional_urls)
    with open('report.html', 'w', encoding='utf-8') as f:
        f.write(html_report)

    url = s3_client.upload_test_report_to_s3('report.html', s3_path_prefix + ".html")
    logging.info("Search result in url %s", url)
    return url
def get_commit(gh, commit_sha):
    """Return the commit object for *commit_sha* in the current repository
    (GITHUB_REPOSITORY env var, defaulting to ClickHouse/ClickHouse)."""
    repo_name = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse")
    return gh.get_repo(repo_name).get_commit(commit_sha)
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    # Paths are injected by the workflow (.github/workflows/release.yml).
    temp_path = os.getenv("TEMP_PATH")
    repo_path = os.getenv("REPO_COPY")

    with open(os.getenv('GITHUB_EVENT_PATH'), 'r', encoding='utf-8') as event_file:
        event = json.load(event_file)

    pr_info = PRInfo(event, need_changed_files=True)

    gh = Github(get_best_robot_token())
    if not pr_info.has_changes_in_documentation():
        # Nothing to release: report success and exit early.
        logging.info("No changes in documentation")
        commit = get_commit(gh, pr_info.sha)
        commit.create_status(context=NAME, description="No changes in docs", state="success")
        sys.exit(0)

    logging.info("Has changes in docs")

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    images_path = os.path.join(temp_path, 'changed_images.json')

    docker_image = 'clickhouse/docs-release'
    if os.path.exists(images_path):
        # The DockerHubPush job may have rebuilt the image; if so, pin its tag.
        logging.info("Images file exists")
        with open(images_path, 'r', encoding='utf-8') as images_fd:
            images = json.load(images_fd)
            logging.info("Got images %s", images)
            if 'clickhouse/docs-release' in images:
                docker_image += ':' + images['clickhouse/docs-release']

    logging.info("Got docker image %s", docker_image)
    # Retry the pull with a growing backoff: a freshly pushed tag may still
    # be propagating through the registry.
    for i in range(10):
        try:
            subprocess.check_output(f"docker pull {docker_image}", shell=True)
            break
        except Exception as ex:
            time.sleep(i * 3)
            # BUG FIX: message said "execption".
            logging.info("Got exception pulling docker %s", ex)
    else:
        raise Exception(f"Cannot pull dockerhub for image {docker_image}")

    test_output = os.path.join(temp_path, 'docs_release_log')
    if not os.path.exists(test_output):
        os.makedirs(test_output)

    token = get_parameter_from_ssm('cloudflare_token', decrypt=True)
    # NOTE(review): the token is visible in the host process list while the
    # container runs; consider passing it via --env-file instead.
    cmd = f"docker run --cap-add=SYS_PTRACE -e CLOUDFLARE_TOKEN={token} --volume={repo_path}:/repo_path --volume={test_output}:/output_path {docker_image}"

    run_log_path = os.path.join(test_output, 'runlog.log')

    with open(run_log_path, 'w', encoding='utf-8') as log:
        with subprocess.Popen(cmd, shell=True, stderr=log, stdout=log) as process:
            retcode = process.wait()
            if retcode == 0:
                logging.info("Run successfully")
                status = "success"
                description = "Run Ok"
            else:
                description = "Run failed (non zero exit code)"
                status = "failure"
                logging.info("Run failed")

    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
    files = os.listdir(test_output)
    lines = []
    additional_files = []
    if not files:
        logging.error("No output files after docs release")
        description = "No output files after docs release"
        status = "failure"
    else:
        for f in files:
            path = os.path.join(test_output, f)
            additional_files.append(path)
            with open(path, 'r', encoding='utf-8') as check_file:
                for line in check_file:
                    if "ERROR" in line:
                        lines.append((line.split(':')[-1], "FAIL"))
        if lines:
            status = "failure"
            description = "Found errors during docs release"
        elif status != "failure":
            lines.append(("No errors found", "OK"))
        else:
            lines.append(("Non zero exit code", "FAIL"))

    s3_helper = S3Helper('https://s3.amazonaws.com')

    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha, lines, additional_files)
    # BUG FIX: was a plain string literal missing the f-prefix, so the
    # notice printed the literal text "{report_url}".
    print(f"::notice ::Report url: {report_url}")
    commit = get_commit(gh, pr_info.sha)
    commit.create_status(context=NAME, description=description, state=status, target_url=report_url)

View File

@ -10,27 +10,41 @@ DIFF_IN_DOCUMENTATION_EXT = [".html", ".md", ".yml", ".txt", ".css", ".js", ".xm
class PRInfo:
def __init__(self, github_event, need_orgs=False, need_changed_files=False):
self.number = github_event['number']
if 'after' in github_event:
if 'pull_request' in github_event: # pull request and other similar events
self.number = github_event['number']
if 'after' in github_event:
self.sha = github_event['after']
else:
self.sha = github_event['pull_request']['head']['sha']
self.labels = { l['name'] for l in github_event['pull_request']['labels'] }
self.user_login = github_event['pull_request']['user']['login']
self.user_orgs = set([])
if need_orgs:
user_orgs_response = requests.get(github_event['pull_request']['user']['organizations_url'])
if user_orgs_response.ok:
response_json = user_orgs_response.json()
self.user_orgs = set(org['id'] for org in response_json)
self.changed_files = set([])
if need_changed_files:
diff_url = github_event['pull_request']['diff_url']
diff = urllib.request.urlopen(diff_url)
diff_object = PatchSet(diff, diff.headers.get_charsets()[0])
self.changed_files = { f.path for f in diff_object }
elif github_event['type'] == 'PushEvent': # push on master
self.number = 0
self.sha = github_event['after']
self.labels = {}
if need_changed_files:
commit_before = github_event['before']
diff = requests.get(f'https://api.github.com/repos/ClickHouse/ClickHouse/compare/{commit_before}...{self.sha}')
if 'files' in diff:
self.changed_files = [f['filename'] for f in diff['files']]
else:
self.changed_files = set([])
else:
self.sha = github_event['pull_request']['head']['sha']
self.labels = { l['name'] for l in github_event['pull_request']['labels'] }
self.user_login = github_event['pull_request']['user']['login']
self.user_orgs = set([])
if need_orgs:
user_orgs_response = requests.get(github_event['pull_request']['user']['organizations_url'])
if user_orgs_response.ok:
response_json = user_orgs_response.json()
self.user_orgs = set(org['id'] for org in response_json)
self.changed_files = set([])
if need_changed_files:
diff_url = github_event['pull_request']['diff_url']
diff = urllib.request.urlopen(diff_url)
diff_object = PatchSet(diff, diff.headers.get_charsets()[0])
self.changed_files = { f.path for f in diff_object }
raise Exception("Unknown event type")
def get_dict(self):
return {