ClickHouse/tests/ci/build_check.py

#!/usr/bin/env python3
#
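# Build-check runner for the ClickHouse CI: fetches the builder image and the
# ccache, runs docker/packager, and uploads the artifacts plus a JSON report
# to S3.
#
# Invocation (inferred from the argv handling below), e.g.:
#   build_check.py 'ClickHouse build check (actions)' <build_name>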
import subprocess
import logging
import json
import os
import sys
import time
from github import Github
from s3_helper import S3Helper
from pr_info import PRInfo, get_event
from get_robot_token import get_best_robot_token
from version_helper import get_version_from_repo, update_version_local
from ccache_utils import get_ccache_if_not_exists, upload_ccache
from ci_config import CI_CONFIG
from docker_pull_helper import get_image_with_version
from tee_popen import TeePopen
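

# Look up this job's build configuration (compiler, package type, sanitizer,
# build type, ...) in the static CI_CONFIG mapping.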
def get_build_config(build_check_name, build_name):
    if build_check_name == 'ClickHouse build check (actions)':
        build_config_name = 'build_config'
    else:
        raise Exception(f"Unknown build check name {build_check_name}")
    return CI_CONFIG[build_config_name][build_name]
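

# Test binaries are exported only for bundled, non-split deb builds that set
# a sanitizer or a special build type.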
def _can_export_binaries(build_config):
    if build_config['package_type'] != 'deb':
        return False
    if build_config['bundled'] != "bundled":
        return False
    if build_config['splitted'] == 'splitted':
        return False
    if build_config['sanitizer'] != '':
        return True
    if build_config['build_type'] != '':
        return True
    return False
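

# Assemble the docker/packager command line from the build configuration.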
def get_packager_cmd(build_config, packager_path, output_path, build_version, image_version, ccache_path, pr_info):
    package_type = build_config['package_type']
    comp = build_config['compiler']
    cmd = f"cd {packager_path} && ./packager --output-dir={output_path} --package-type={package_type} --compiler={comp}"

    if build_config['build_type']:
        cmd += ' --build-type={}'.format(build_config['build_type'])
    if build_config['sanitizer']:
        cmd += ' --sanitizer={}'.format(build_config['sanitizer'])
    if build_config['splitted'] == 'splitted':
        cmd += ' --split-binary'
    if build_config['tidy'] == 'enable':
        cmd += ' --clang-tidy'

    cmd += ' --cache=ccache'
    cmd += ' --ccache_dir={}'.format(ccache_path)

    if 'alien_pkgs' in build_config and build_config['alien_pkgs']:
        if pr_info.number == 0 or 'release' in pr_info.labels:
            cmd += ' --alien-pkgs rpm tgz'

    cmd += ' --docker-image-version={}'.format(image_version)
    cmd += ' --version={}'.format(build_version)

    if _can_export_binaries(build_config):
        cmd += ' --with-binaries=tests'

    return cmd
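

# deb packages are built in clickhouse/deb-builder; everything else uses
# clickhouse/binary-builder.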
def get_image_name(build_config):
    if build_config['package_type'] != 'deb':
        return 'clickhouse/binary-builder'
    else:
        return 'clickhouse/deb-builder'
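

# Run the packager, teeing its output to build_log.log; success requires a
# zero exit code AND at least one artifact in the output directory.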
def build_clickhouse(packager_cmd, logs_path, build_output_path):
    build_log_path = os.path.join(logs_path, 'build_log.log')
    with TeePopen(packager_cmd, build_log_path) as process:
        retcode = process.wait()
        if os.path.exists(build_output_path):
            build_results = os.listdir(build_output_path)
        else:
            build_results = []

        if retcode == 0:
            if len(build_results) != 0:
                logging.info("Built successfully")
            else:
                logging.info("Success exit code, but no build artifacts => build failed")
        else:
            logging.info("Build failed")
    return build_log_path, retcode == 0 and len(build_results) > 0
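

# List artifacts already uploaded under the given S3 prefix by a previous run
# of this job; returns None if the listing fails.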
def get_build_results_if_exists(s3_helper, s3_prefix):
    try:
        content = s3_helper.list_prefix(s3_prefix)
        return content
    except Exception as ex:
        logging.info("Got exception %s listing %s", ex, s3_prefix)
        return None
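

# Record the artifact name in $GITHUB_ENV for later workflow steps and dump
# the build report (urls, config, timing, status) to a JSON file.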
def create_json_artifact(temp_path, build_name, log_url, build_urls, build_config, elapsed, success):
    subprocess.check_call(f"echo 'BUILD_NAME=build_urls_{build_name}' >> $GITHUB_ENV", shell=True)
    result = {
        "log_url": log_url,
        "build_urls": build_urls,
        "build_config": build_config,
        "elapsed_seconds": elapsed,
        "status": success,
    }
    with open(os.path.join(temp_path, "build_urls_" + build_name + '.json'), 'w') as build_links:
        json.dump(result, build_links)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    repo_path = os.getenv("REPO_COPY", os.path.abspath("../../"))
    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
    caches_path = os.getenv("CACHES_PATH", temp_path)

    build_check_name = sys.argv[1]
    build_name = sys.argv[2]

    build_config = get_build_config(build_check_name, build_name)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    pr_info = PRInfo(get_event())

    logging.info("Repo copy path %s", repo_path)

    gh = Github(get_best_robot_token())
    s3_helper = S3Helper('https://s3.amazonaws.com')

    version = get_version_from_repo(repo_path)
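
    # Choose the first component of the S3 path: release branch name for
    # release PRs, major version for master pushes, PR number otherwise.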
    release_or_pr = None
    if 'release' in pr_info.labels or 'release-lts' in pr_info.labels:
        # for release pull requests we use branch name prefixes, not pr numbers
        release_or_pr = pr_info.head_ref
    elif pr_info.number == 0:
        # for pushes to master - major version
        release_or_pr = ".".join(version.as_tuple()[:2])
    else:
        # PR number for anything else
        release_or_pr = str(pr_info.number)

    s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))

    # If this is a rerun, try to find already-created artifacts and just
    # publish them as the GitHub Actions artifact (result)
    build_results = get_build_results_if_exists(s3_helper, s3_path_prefix)
    if build_results is not None and len(build_results) > 0:
        logging.info("Some build results found %s", build_results)
        build_urls = []
        log_url = ''
        for url in build_results:
            if 'build_log.log' in url:
                log_url = 'https://s3.amazonaws.com/clickhouse-builds/' + url.replace('+', '%2B').replace(' ', '%20')
            else:
                build_urls.append('https://s3.amazonaws.com/clickhouse-builds/' + url.replace('+', '%2B').replace(' ', '%20'))
        create_json_artifact(temp_path, build_name, log_url, build_urls, build_config, 0, len(build_urls) > 0)
        sys.exit(0)
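
    # No results from a previous run: perform a real build. Fetch the builder
    # image first.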
    image_name = get_image_name(build_config)
    docker_image = get_image_with_version(os.getenv("IMAGES_PATH"), image_name)
    image_version = docker_image.version

    logging.info("Got version from repo %s", version.get_version_string())

    version_type = 'testing'
    if 'release' in pr_info.labels or 'release-lts' in pr_info.labels:
        version_type = 'stable'

    update_version_local(repo_path, pr_info.sha, version, version_type)
    logging.info("Updated local files with version")

    logging.info("Build short name %s", build_name)

    build_output_path = os.path.join(temp_path, build_name)
    if not os.path.exists(build_output_path):
        os.makedirs(build_output_path)
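
    # Try to reuse the per-build ccache from S3; fall back to an empty one.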
    ccache_path = os.path.join(caches_path, build_name + '_ccache')
    logging.info("Will try to fetch cache for our build")
    get_ccache_if_not_exists(ccache_path, s3_helper, pr_info.number, temp_path)

    if not os.path.exists(ccache_path):
        logging.info("cache was not fetched, will create empty dir")
        os.makedirs(ccache_path)

    if build_config['package_type'] == "performance" and pr_info.number != 0:
        # perf tests store some information about git commits, so they need
        # an up-to-date local master branch
        subprocess.check_call(f"cd {repo_path} && git fetch origin master:master", shell=True)

    packager_cmd = get_packager_cmd(build_config, os.path.join(repo_path, "docker/packager"), build_output_path, version.get_version_string(), image_version, ccache_path, pr_info)
    logging.info("Going to run packager with %s", packager_cmd)

    build_clickhouse_log = os.path.join(temp_path, "build_log")
    if not os.path.exists(build_clickhouse_log):
        os.makedirs(build_clickhouse_log)

    start = time.time()
    log_path, success = build_clickhouse(packager_cmd, build_clickhouse_log, build_output_path)
    elapsed = int(time.time() - start)

    # files produced inside the build container may be root-owned
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {build_output_path}", shell=True)
    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}", shell=True)
    logging.info("Build finished with %s, log path %s", success, log_path)

    logging.info("Will upload cache")
    upload_ccache(ccache_path, s3_helper, pr_info.number, temp_path)

    log_url = ''  # keep defined even when the build log is missing
    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
    else:
        logging.info("Build log doesn't exist")

    build_urls = s3_helper.upload_build_folder_to_s3(build_output_path, s3_path_prefix, keep_dirs_in_s3_path=False, upload_symlinks=False)
    logging.info("Got build URLs %s", build_urls)

    print("::notice ::Build URLs: {}".format('\n'.join(build_urls)))
    print("::notice ::Log URL: {}".format(log_url))

    create_json_artifact(temp_path, build_name, log_url, build_urls, build_config, elapsed, success)

    # Fail the build job if the build did not succeed
    if not success:
        sys.exit(1)