From dc1964aa9ac9985cc48602be4c38346c7d2ac970 Mon Sep 17 00:00:00 2001
From: alesapin
Date: Tue, 9 Nov 2021 13:15:22 +0300
Subject: [PATCH] Add unit tests to CI

---
 .github/workflows/main.yml   |  29 ++++
 tests/ci/unit_tests_check.py | 287 +++++++++++++++++++++++++++++++++++
 2 files changed, 316 insertions(+)
 create mode 100644 tests/ci/unit_tests_check.py

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 76933b4e2e4..5fe9af386ad 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -333,6 +333,35 @@ jobs:
           docker kill $(docker ps -q) ||:
           docker rm -f $(docker ps -a -q) ||:
           sudo rm -fr $TEMP_PATH
+  UnitTestsAsan:
+    needs: [BuilderDebAsan]
+    runs-on: [self-hosted, func-tester]
+    steps:
+      - name: Download json reports
+        uses: actions/download-artifact@v2
+        with:
+          path: ${{runner.temp}}/reports_dir
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Unit test
+        env:
+          TEMP_PATH: ${{runner.temp}}/unit_tests_asan
+          REPORTS_PATH: ${{runner.temp}}/reports_dir
+          CHECK_NAME: 'Unit tests (asan, actions)'
+          REPO_COPY: ${{runner.temp}}/unit_tests_asan/ClickHouse
+          REQUIRED_BUILD_NUMBER: 3
+        run: |
+          sudo rm -fr $TEMP_PATH
+          mkdir -p $TEMP_PATH
+          cp -r $GITHUB_WORKSPACE $TEMP_PATH
+          cd $REPO_COPY/tests/ci
+          python3 unit_tests_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER
+      - name: Cleanup
+        if: always()
+        run: |
+          docker kill $(docker ps -q) ||:
+          docker rm -f $(docker ps -a -q) ||:
+          sudo rm -fr $TEMP_PATH
   FastTest:
     needs: DockerHubPush
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
diff --git a/tests/ci/unit_tests_check.py b/tests/ci/unit_tests_check.py
new file mode 100644
index 00000000000..2a277d3168f
--- /dev/null
+++ b/tests/ci/unit_tests_check.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python3
+
+import logging
+import os
+import sys
+import time
+import subprocess
+import json
+
+from github import Github
+import requests
+
+from report import create_test_html_report
+from s3_helper import S3Helper
+from get_robot_token import get_best_robot_token
+from pr_info import PRInfo
+
+IMAGE_NAME = 'clickhouse/unit-test'
+
+DOWNLOAD_RETRIES_COUNT = 5
+
+def process_logs(s3_client, additional_logs, s3_path_prefix):
+    additional_urls = []
+    for log_path in additional_logs:
+        if log_path:
+            additional_urls.append(
+                s3_client.upload_test_report_to_s3(
+                    log_path,
+                    s3_path_prefix + "/" + os.path.basename(log_path)))
+
+    return additional_urls
+
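+# Download a file from `url` to `path`, retrying up to DOWNLOAD_RETRIES_COUNT times;
+# a simple progress bar is drawn when stdout is a TTY.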
+def dowload_build_with_progress(url, path):
+    logging.info("Downloading from %s to temp path %s", url, path)
+    for i in range(DOWNLOAD_RETRIES_COUNT):
+        try:
+            with open(path, 'wb') as f:
+                response = requests.get(url, stream=True)
+                response.raise_for_status()
+                total_length = response.headers.get('content-length')
+                if total_length is None or int(total_length) == 0:
+                    logging.info("No content-length, will download file without progress")
+                    f.write(response.content)
+                else:
+                    dl = 0
+                    total_length = int(total_length)
+                    logging.info("Content length is %ld bytes", total_length)
+                    for data in response.iter_content(chunk_size=4096):
+                        dl += len(data)
+                        f.write(data)
+                        if sys.stdout.isatty():
+                            done = int(50 * dl / total_length)
+                            percent = int(100 * float(dl) / total_length)
+                            eq_str = '=' * done
+                            space_str = ' ' * (50 - done)
+                            sys.stdout.write(f"\r[{eq_str}{space_str}] {percent}%")
+                            sys.stdout.flush()
+            break
+        except Exception as ex:
+            sys.stdout.write("\n")
+            time.sleep(3)
+            logging.info("Exception while downloading %s, retry %s", ex, i + 1)
+            if os.path.exists(path):
+                os.remove(path)
+    else:
+        raise Exception(f"Cannot download build from {url}, all retries exceeded")
+
+    sys.stdout.write("\n")
+    logging.info("Downloading finished")
+
+
+def upload_results(s3_client, pr_number, commit_sha, test_results, raw_log, additional_files, check_name):
+    additional_files = [raw_log] + additional_files
+    s3_path_prefix = f"{pr_number}/{commit_sha}/" + check_name.lower().replace(' ', '_').replace('(', '_').replace(')', '_').replace(',', '_')
+    additional_urls = process_logs(s3_client, additional_files, s3_path_prefix)
+
+    branch_url = "https://github.com/ClickHouse/ClickHouse/commits/master"
+    branch_name = "master"
+    if pr_number != 0:
+        branch_name = f"PR #{pr_number}"
+        branch_url = f"https://github.com/ClickHouse/ClickHouse/pull/{pr_number}"
+    commit_url = f"https://github.com/ClickHouse/ClickHouse/commit/{commit_sha}"
+
+    task_url = f"https://github.com/ClickHouse/ClickHouse/actions/runs/{os.getenv('GITHUB_RUN_ID')}"
+
+    raw_log_url = additional_urls[0]
+    additional_urls.pop(0)
+
+    html_report = create_test_html_report(check_name, test_results, raw_log_url, task_url, branch_url, branch_name, commit_url, additional_urls, True)
+    with open('report.html', 'w', encoding='utf-8') as f:
+        f.write(html_report)
+
+    url = s3_client.upload_test_report_to_s3('report.html', s3_path_prefix + ".html")
+    logging.info("Search result in url %s", url)
+    return url
+
+def get_commit(gh, commit_sha):
+    repo = gh.get_repo(os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse"))
+    commit = repo.get_commit(commit_sha)
+    return commit
+
+def get_build_config(build_number, repo_path):
+    ci_config_path = os.path.join(repo_path, "tests/ci/ci_config.json")
+    with open(ci_config_path, 'r', encoding='utf-8') as ci_config:
+        config_dict = json.load(ci_config)
+    return config_dict['build_config'][build_number]
+
+def get_build_urls(build_config_str, reports_path):
+    for root, _, files in os.walk(reports_path):
+        for f in files:
+            if build_config_str in f:
+                logging.info("Found build report json %s", f)
+                with open(os.path.join(root, f), 'r', encoding='utf-8') as file_handler:
+                    build_report = json.load(file_handler)
+                    return build_report['build_urls']
+    return []
+
+def build_config_to_string(build_config):
+    if build_config["package-type"] == "performance":
+        return "performance"
+
+    return "_".join([
+        build_config['compiler'],
+        build_config['build-type'] if build_config['build-type'] else "relwithdebuginfo",
+        build_config['sanitizer'] if build_config['sanitizer'] else "none",
+        build_config['bundled'],
+        build_config['splitted'],
+        "tidy" if build_config['tidy'] == "enable" else "notidy",
+        "with_coverage" if build_config['with_coverage'] else "without_coverage",
+        build_config['package-type'],
+    ])
+
+def get_test_name(line):
+    elements = reversed(line.split(' '))
+    for element in elements:
+        if '(' not in element and ')' not in element:
+            return element
+    raise Exception(f"No test name in line '{line}'")
+
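+# Parse the googletest log at <result_folder>/test_result.txt: collect per-test OK/FAIL
+# results and mark the check as failed on segfaults, fatal signals, failed tests
+# or a missing PASSED record.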
+def process_result(result_folder):
+    OK_SIGN = 'OK ]'
+    FAILED_SIGN = 'FAILED ]'
+    SEGFAULT = 'Segmentation fault'
+    SIGNAL = 'received signal SIG'
+    PASSED = 'PASSED'
+
+    summary = []
+    total_counter = 0
+    failed_counter = 0
+    result_log_path = f'{result_folder}/test_result.txt'
+    if not os.path.exists(result_log_path):
+        logging.info("No output log on path %s", result_log_path)
+        return "error", "No output log", summary, []
+
+    status = "success"
+    description = ""
+    passed = False
+    with open(result_log_path, 'r', encoding='utf-8') as test_result:
+        for line in test_result:
+            if OK_SIGN in line:
+                logging.info("Found ok line: '%s'", line)
+                test_name = get_test_name(line.strip())
+                logging.info("Test name: '%s'", test_name)
+                summary.append((test_name, "OK"))
+                total_counter += 1
+            elif FAILED_SIGN in line and 'listed below' not in line and 'ms)' in line:
+                logging.info("Found fail line: '%s'", line)
+                test_name = get_test_name(line.strip())
+                logging.info("Test name: '%s'", test_name)
+                summary.append((test_name, "FAIL"))
+                total_counter += 1
+                failed_counter += 1
+            elif SEGFAULT in line:
+                logging.info("Found segfault line: '%s'", line)
+                status = "failure"
+                description += "Segmentation fault. "
+                break
+            elif SIGNAL in line:
+                logging.info("Received signal line: '%s'", line)
+                status = "failure"
+                description += "Exit on signal. "
+                break
+            elif PASSED in line:
+                logging.info("PASSED record found: '%s'", line)
+                passed = True
+
+    if not passed:
+        status = "failure"
+        description += "PASSED record not found. "
+
+    if failed_counter != 0:
+        status = "failure"
+
+    if not description:
+        description += f"fail: {failed_counter}, passed: {total_counter - failed_counter}"
+
+    return status, description, summary, [result_log_path]
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO)
+    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
+    repo_path = os.getenv("REPO_COPY", os.path.abspath("../../"))
+    reports_path = os.getenv("REPORTS_PATH", "./reports")
+
+    check_name = sys.argv[1]
+    build_number = int(sys.argv[2])
+
+    if not os.path.exists(temp_path):
+        os.makedirs(temp_path)
+
+    with open(os.getenv('GITHUB_EVENT_PATH'), 'r', encoding='utf-8') as event_file:
+        event = json.load(event_file)
+
+    pr_info = PRInfo(event)
+
+    gh = Github(get_best_robot_token())
+
+    images_path = None
+    for root, _, files in os.walk(reports_path):
+        for f in files:
+            if f == 'changed_images.json':
+                images_path = os.path.join(root, 'changed_images.json')
+                break
+
+    docker_image = IMAGE_NAME
+    if images_path and os.path.exists(images_path):
+        logging.info("Images file exists")
+        with open(images_path, 'r', encoding='utf-8') as images_fd:
+            images = json.load(images_fd)
+            logging.info("Got images %s", images)
+            if IMAGE_NAME in images:
+                docker_image += ':' + images[IMAGE_NAME]
+    else:
+        logging.info("Images file not found")
+
+    for i in range(10):
+        try:
+            logging.info("Pulling image %s", docker_image)
+            subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
+            break
+        except Exception as ex:
+            time.sleep(i * 3)
+            logging.info("Got exception pulling docker %s", ex)
+    else:
+        raise Exception(f"Cannot pull image {docker_image}, all retries exceeded")
+
+    build_config = get_build_config(build_number, repo_path)
+    build_config_str = build_config_to_string(build_config)
+    urls = get_build_urls(build_config_str, reports_path)
+
+    if not urls:
+        raise Exception("No build URLs found")
+
+    tests_binary_path = os.path.join(temp_path, "unit_tests_dbms")
+    for url in urls:
+        if url.endswith('unit_tests_dbms'):
+            dowload_build_with_progress(url, tests_binary_path)
+            break
+
+    os.chmod(tests_binary_path, 0o777)
+
+    test_output = os.path.join(temp_path, "test_output")
+    if not os.path.exists(test_output):
+        os.makedirs(test_output)
+
+    run_command = f"docker run --cap-add=SYS_PTRACE --volume={tests_binary_path}:/unit_tests_dbms --volume={test_output}:/test_output {docker_image}"
+
+    run_log_path = os.path.join(test_output, "runlog.log")
+
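+    # Run the unit test binary inside the unit-test image; stdout/stderr go to runlog.log.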
+    logging.info("Going to run unit tests: %s", run_command)
+
+    with open(run_log_path, 'w', encoding='utf-8') as log:
+        with subprocess.Popen(run_command, shell=True, stderr=log, stdout=log) as process:
+            retcode = process.wait()
+            if retcode == 0:
+                logging.info("Run successfully")
+            else:
+                logging.info("Run failed")
+
+    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
+
+    s3_helper = S3Helper('https://s3.amazonaws.com')
+    state, description, test_results, additional_logs = process_result(test_output)
+    report_url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, run_log_path, additional_logs, check_name)
+    print(f"::notice ::Report url: {report_url}")
+    commit = get_commit(gh, pr_info.sha)
+    commit.create_status(context=check_name, description=description, state=state, target_url=report_url)