Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-11-21 23:21:59 +00:00
Fixes

commit 2e6da9ea71 (parent 7a62338b0e)
.github/workflows/main.yml (vendored): 2 changes
@@ -365,7 +365,7 @@ jobs:
       env:
         TEMP_PATH: ${{runner.temp}}/stress_debug
         REPORTS_PATH: ${{runner.temp}}/reports_dir
-        CHECK_NAME: 'Stress tests (debug, actions)'
+        CHECK_NAME: 'Stress test (debug, actions)'
         REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
       run: |
         sudo rm -fr $TEMP_PATH
@@ -14,6 +14,7 @@ from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from ci_config import build_config_to_string
 from build_download_helper import get_build_config_for_check, get_build_urls
+from docker_pull_helper import get_image_with_version


 DOWNLOAD_RETRIES_COUNT = 5
@@ -49,27 +50,7 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    images_path = os.path.join(temp_path, 'changed_images.json')
-
-    docker_image = IMAGE_NAME
-    if os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r', encoding='utf-8') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if IMAGE_NAME in images:
-                docker_image += ':' + images[IMAGE_NAME]
-
-    for i in range(10):
-        try:
-            logging.info("Pulling image %s", docker_image)
-            subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image}")
+    docker_image = get_image_with_version(temp_path, IMAGE_NAME)

     build_config = get_build_config_for_check(check_name)
     build_config_str = build_config_to_string(build_config)
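Both the removed block above and the new docker_pull_helper introduced later in this commit rely on Python's for/else retry idiom: the else branch runs only if the loop finished without hitting break. A small self-contained sketch of that idiom (not from the repository), with a stand-in operation in place of the real docker pull:

import logging
import time

def flaky_operation(attempt):
    # Stand-in for the real work (e.g. a docker pull); fails on the first attempts.
    if attempt < 2:
        raise RuntimeError("transient failure")
    return "ok"

logging.basicConfig(level=logging.INFO)
for i in range(10):
    try:
        result = flaky_operation(i)
        break                      # success: the else clause below is skipped
    except Exception as ex:
        time.sleep(i * 3)          # back off a little longer on every retry
        logging.info("Got exception: %s", ex)
else:
    # Reached only if all 10 attempts failed (the loop was never broken).
    raise Exception("operation did not succeed after 10 attempts")
print(result)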
@@ -12,6 +12,8 @@ from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
 from version_helper import get_version_from_repo, update_version_local
 from ccache_utils import get_ccache_if_not_exists, upload_ccache
+from ci_config import build_config_to_string
+from docker_pull_helper import get_images_with_versions


 def get_build_config(build_check_name, build_number, repo_path):
@@ -92,21 +94,6 @@ def build_clickhouse(packager_cmd, logs_path):
         logging.info("Build failed")
     return build_log_path, retcode == 0

-def build_config_to_string(build_config):
-    if build_config["package-type"] == "performance":
-        return "performance"
-
-    return "_".join([
-        build_config['compiler'],
-        build_config['build-type'] if build_config['build-type'] else "relwithdebuginfo",
-        build_config['sanitizer'] if build_config['sanitizer'] else "none",
-        build_config['bundled'],
-        build_config['splitted'],
-        "tidy" if build_config['tidy'] == "enable" else "notidy",
-        "with_coverage" if build_config['with_coverage'] else "without_coverage",
-        build_config['package-type'],
-    ])
-
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
     repo_path = os.getenv("REPO_COPY", os.path.abspath("../../"))
@@ -130,27 +117,9 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    images_path = os.path.join(os.getenv("IMAGES_PATH", temp_path), 'changed_images.json')
     image_name = get_image_name(build_config)
-    image_version = 'latest'
-    if os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if image_name in images:
-                image_version = images[image_name]
-
-    for i in range(10):
-        try:
-            logging.info("Pulling image %s:%s", image_name, image_version)
-            subprocess.check_output(f"docker pull {image_name}:{image_version}", stderr=subprocess.STDOUT, shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image docker pull {image_name}:{image_version}")
+    docker_images = get_images_with_versions(reports_path=os.getenv("IMAGES_PATH"), [image_name])
+    image_version = docker_images[0].version

     version = get_version_from_repo(repo_path)
     version.tweak_update()
@@ -4,9 +4,9 @@ CI_CONFIG = {
     "build_config": [
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "alien_pkgs": True,
@@ -15,9 +15,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "performance",
+            "package_type": "performance",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -25,9 +25,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "gcc-11",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -35,9 +35,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "address",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -45,9 +45,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
            "sanitizer": "undefined",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -55,9 +55,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "thread",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -65,9 +65,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "memory",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -75,9 +75,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "debug",
+            "build_type": "debug",
             "sanitizer": "",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -85,9 +85,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "gcc-11",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "unbundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -95,9 +95,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -107,9 +107,9 @@ CI_CONFIG = {
     "special_build_config": [
         {
             "compiler": "clang-13",
-            "build-type": "debug",
+            "build_type": "debug",
             "sanitizer": "",
-            "package-type": "deb",
+            "package_type": "deb",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "enable",
@@ -117,9 +117,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "splitted",
             "tidy": "disable",
@@ -127,9 +127,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13-darwin",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -137,9 +137,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13-aarch64",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -147,9 +147,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13-freebsd",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -157,9 +157,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13-darwin-aarch64",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -167,9 +167,9 @@ CI_CONFIG = {
         },
         {
             "compiler": "clang-13-ppc64le",
-            "build-type": "",
+            "build_type": "",
             "sanitizer": "",
-            "package-type": "binary",
+            "package_type": "binary",
             "bundled": "bundled",
             "splitted": "unsplitted",
             "tidy": "disable",
@@ -185,7 +185,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -197,7 +197,7 @@ CI_CONFIG = {
             "sanitizer": "thread",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -209,7 +209,7 @@ CI_CONFIG = {
             "sanitizer": "memory",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -221,7 +221,7 @@ CI_CONFIG = {
            "sanitizer": "undefined",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -233,7 +233,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -245,7 +245,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -257,7 +257,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -269,7 +269,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -281,7 +281,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -293,7 +293,7 @@ CI_CONFIG = {
             "sanitizer": "thread",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -305,7 +305,7 @@ CI_CONFIG = {
             "sanitizer": "memory",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -317,7 +317,7 @@ CI_CONFIG = {
            "sanitizer": "undefined",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -329,7 +329,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -341,7 +341,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -353,7 +353,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "unbundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -365,7 +365,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -377,7 +377,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -389,7 +389,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -401,7 +401,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -413,7 +413,7 @@ CI_CONFIG = {
             "sanitizer": "thread",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -425,7 +425,7 @@ CI_CONFIG = {
            "sanitizer": "undefined",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -437,7 +437,7 @@ CI_CONFIG = {
             "sanitizer": "memory",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -449,7 +449,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -461,7 +461,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -473,7 +473,7 @@ CI_CONFIG = {
             "sanitizer": "thread",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -485,7 +485,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -497,7 +497,7 @@ CI_CONFIG = {
             "sanitizer": "memory",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -509,7 +509,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -521,7 +521,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -533,7 +533,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "splitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -545,7 +545,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -557,7 +557,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -569,7 +569,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -581,7 +581,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -593,7 +593,7 @@ CI_CONFIG = {
             "sanitizer": "memory",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -605,7 +605,7 @@ CI_CONFIG = {
             "sanitizer": "thread",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -617,7 +617,7 @@ CI_CONFIG = {
             "sanitizer": "thread",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -629,7 +629,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -641,7 +641,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -653,7 +653,7 @@ CI_CONFIG = {
             "sanitizer": "memory",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -665,7 +665,7 @@ CI_CONFIG = {
             "sanitizer": "thread",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -677,7 +677,7 @@ CI_CONFIG = {
            "sanitizer": "undefined",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -689,7 +689,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -701,7 +701,7 @@ CI_CONFIG = {
             "sanitizer": "address",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     },
@@ -713,7 +713,7 @@ CI_CONFIG = {
             "sanitizer": "none",
             "bundled": "bundled",
             "splitted": "unsplitted",
-            "clang-tidy": "disable",
+            "clang_tidy": "disable",
             "with_coverage": False
         }
     }
@@ -721,16 +721,16 @@ CI_CONFIG = {
 }

 def build_config_to_string(build_config):
-    if build_config["package-type"] == "performance":
+    if build_config["package_type"] == "performance":
         return "performance"

     return "_".join([
         build_config['compiler'],
-        build_config['build-type'] if build_config['build-type'] else "relwithdebuginfo",
+        build_config['build_type'] if build_config['build_type'] else "relwithdebuginfo",
         build_config['sanitizer'] if build_config['sanitizer'] else "none",
         build_config['bundled'],
         build_config['splitted'],
         "tidy" if build_config['tidy'] == "enable" else "notidy",
         "with_coverage" if build_config['with_coverage'] else "without_coverage",
-        build_config['package-type'],
+        build_config['package_type'],
     ])
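For orientation (not part of the commit): a self-contained sketch that runs the build_config_to_string shown in the last hunk against an illustrative config dict using the new underscore key names; the dict values below are made up for the example, not taken from CI_CONFIG.

def build_config_to_string(build_config):
    # Copied from the version of ci_config.py shown in the diff above.
    if build_config["package_type"] == "performance":
        return "performance"

    return "_".join([
        build_config['compiler'],
        build_config['build_type'] if build_config['build_type'] else "relwithdebuginfo",
        build_config['sanitizer'] if build_config['sanitizer'] else "none",
        build_config['bundled'],
        build_config['splitted'],
        "tidy" if build_config['tidy'] == "enable" else "notidy",
        "with_coverage" if build_config['with_coverage'] else "without_coverage",
        build_config['package_type'],
    ])

# Illustrative build config; an empty build_type falls back to "relwithdebuginfo".
example_config = {
    "compiler": "clang-13",
    "build_type": "",
    "sanitizer": "address",
    "package_type": "deb",
    "bundled": "bundled",
    "splitted": "unsplitted",
    "tidy": "disable",
    "with_coverage": False,
}

# Prints: clang-13_relwithdebuginfo_address_bundled_unsplitted_notidy_without_coverage_deb
print(build_config_to_string(example_config))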
@@ -5,7 +5,6 @@ import logging
 import os
 import json
 import subprocess
 import time

 from github import Github

@@ -14,7 +13,7 @@ from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from build_download_helper import download_builds_filter
 from upload_result_helper import upload_results
-
+from docker_pull_helper import get_images_with_versions

 IMAGE_UBUNTU = "clickhouse/test-old-ubuntu"
 IMAGE_CENTOS = "clickhouse/test-old-centos"
@@ -112,37 +111,7 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    for root, _, files in os.walk(reports_path):
-        for f in files:
-            if f == 'changed_images.json':
-                images_path = os.path.join(root, 'changed_images.json')
-                break
-
-    docker_images = []
-    if images_path and os.path.exists(images_path):
-        for image_name in [IMAGE_CENTOS, IMAGE_UBUNTU]:
-            docker_image = image_name
-            logging.info("Images file exists")
-            with open(images_path, 'r', encoding='utf-8') as images_fd:
-                images = json.load(images_fd)
-                logging.info("Got images %s", images)
-                if image_name in images:
-                    docker_image += ':' + images[image_name]
-            docker_images.append(docker_image)
-    else:
-        logging.info("Images file not found")
-
-    for docker_image in docker_images:
-        for i in range(10):
-            try:
-                logging.info("Pulling image %s", docker_image)
-                subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
-                break
-            except Exception as ex:
-                time.sleep(i * 3)
-                logging.info("Got execption pulling docker %s", ex)
-        else:
-            raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image}")
+    docker_images = get_images_with_versions(reports_path, [IMAGE_CENTOS, IMAGE_UBUNTU])

     packages_path = os.path.join(temp_path, "packages")
     if not os.path.exists(packages_path):
tests/ci/docker_pull_helper.py (new file, 57 lines)
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+
+import os
+import json
+import time
+import subprocess
+import logging
+
+class DockerImage:
+    def __init__(self, name, version=None):
+        self.name = name
+        if version is None:
+            self.version = 'latest'
+        else:
+            self.version = version
+
+    def __str__(self):
+        return f"{self.name}:{self.version}"
+
+def get_images_with_versions(reports_path, images, pull=True):
+    for root, _, files in os.walk(reports_path):
+        for f in files:
+            if f == 'changed_images.json':
+                images_path = os.path.join(root, 'changed_images.json')
+                break
+
+    docker_images = []
+    if images_path and os.path.exists(images_path):
+        for image_name in images:
+            docker_image = DockerImage(image_name)
+            logging.info("Images file exists")
+            with open(images_path, 'r', encoding='utf-8') as images_fd:
+                images = json.load(images_fd)
+                logging.info("Got images %s", images)
+                if image_name in images:
+                    docker_image.version = images[image_name]
+            docker_images.append(docker_image)
+    else:
+        logging.info("Images file not found")
+
+    if pull:
+        for docker_image in docker_images:
+            for i in range(10):
+                try:
+                    logging.info("Pulling image %s", docker_image)
+                    latest_error = subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
+                    break
+                except Exception as ex:
+                    time.sleep(i * 3)
+                    logging.info("Got execption pulling docker %s", ex)
+            else:
+                raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image} because of {latest_error}")
+
+    return docker_images
+
+def get_image_with_version(reports_path, image, pull=True):
+    return get_images_with_versions(reports_path, [image], pull)[0]
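A quick usage sketch (not part of the commit) showing how the check scripts consume the new helper. It assumes tests/ci is on the import path; the temporary directory, image tag and image names below are made up for the example, and pull=False skips the actual docker pull.

import json
import os
import tempfile

from docker_pull_helper import get_image_with_version, get_images_with_versions

# Build a throwaway reports directory containing a changed_images.json,
# the way the docker-images workflow would have left one behind.
reports_path = tempfile.mkdtemp()
with open(os.path.join(reports_path, 'changed_images.json'), 'w', encoding='utf-8') as f:
    json.dump({"clickhouse/docs-check": "12345"}, f)

# Single image, as docs_check.py or fast_test_check.py now do.
docs_image = get_image_with_version(reports_path, 'clickhouse/docs-check', pull=False)
print(docs_image)  # clickhouse/docs-check:12345

# Several images at once, as compatibility_check.py does; images not listed
# in changed_images.json fall back to the 'latest' tag.
images = get_images_with_versions(reports_path, ['clickhouse/test-old-centos', 'clickhouse/test-old-ubuntu'], pull=False)
print([str(i) for i in images])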
@@ -2,7 +2,6 @@
 import logging
 import subprocess
 import os
 import time
 import json
 import sys
 from github import Github
@@ -10,6 +9,7 @@ from s3_helper import S3Helper
 from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
 from upload_result_helper import upload_results
+from docker_pull_helper import get_image_with_version

 NAME = "Docs Check (actions)"

@@ -41,27 +41,7 @@ if __name__ == "__main__":
     if not os.path.exists(temp_path):
         os.makedirs(temp_path)

-    images_path = os.path.join(temp_path, 'changed_images.json')
-
-    docker_image = 'clickhouse/docs-check'
-    if os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r', encoding='utf-8') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if 'clickhouse/docs-check' in images:
-                docker_image += ':' + images['clickhouse/docs-check']
-
-    logging.info("Got docker image %s", docker_image)
-    for i in range(10):
-        try:
-            subprocess.check_output(f"docker pull {docker_image}", shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image {docker_image}")
+    docker_image = get_image_with_version(temp_path, 'clickhouse/docs-check')

     test_output = os.path.join(temp_path, 'docs_check_log')
     if not os.path.exists(test_output):
@@ -15,6 +15,7 @@ from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
 from ssh import SSHKey
 from upload_result_helper import upload_results
+from docker_pull_helper import get_chaned_images

 NAME = "Docs Release (actions)"

@@ -46,27 +47,7 @@ if __name__ == "__main__":
     if not os.path.exists(temp_path):
         os.makedirs(temp_path)

-    images_path = os.path.join(temp_path, 'changed_images.json')
-
-    docker_image = 'clickhouse/docs-release'
-    if os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r', encoding='utf-8') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if 'clickhouse/docs-release' in images:
-                docker_image += ':' + images['clickhouse/docs-release']
-
-    logging.info("Got docker image %s", docker_image)
-    for i in range(10):
-        try:
-            subprocess.check_output(f"docker pull {docker_image}", shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image {docker_image}")
+    docker_image = get_chaned_images(temp_path, ['clickhouse/docs-release'])

     test_output = os.path.join(temp_path, 'docs_release_log')
     if not os.path.exists(test_output):
@@ -11,6 +11,7 @@ from pr_info import PRInfo
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from upload_result_helper import upload_results
+from docker_pull_helper import get_image_with_version


 NAME = 'Fast test (actions)'
@@ -68,27 +69,7 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    images_path = os.path.join(temp_path, 'changed_images.json')
-    docker_image = 'clickhouse/fasttest'
-    if os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if 'clickhouse/fasttest' in images:
-                docker_image += ':' + images['clickhouse/fasttest']
-
-    logging.info("Got docker image %s", docker_image)
-    for i in range(10):
-        try:
-            subprocess.check_output(f"docker pull {docker_image}", shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image {docker_image}")
-
+    docker_image = get_image_with_version(temp_path, 'clickhouse/fasttest')

     s3_helper = S3Helper('https://s3.amazonaws.com')

@@ -15,6 +15,7 @@ from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from build_download_helper import download_all_deb_packages
 from upload_result_helper import upload_results
+from docker_pull_helper import get_image_with_version


 def get_commit(gh, commit_sha):
@@ -124,41 +125,15 @@ if __name__ == "__main__":
         commit.create_status(context=check_name, description='Not found changed stateless tests', state='success')
         sys.exit(0)

-    for root, _, files in os.walk(reports_path):
-        for f in files:
-            if f == 'changed_images.json':
-                images_path = os.path.join(root, 'changed_images.json')
-                break
-
     image_name = get_image_name(check_name)

-    docker_image = image_name
-    if images_path and os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r', encoding='utf-8') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if image_name in images:
-                docker_image += ':' + images[image_name]
-    else:
-        logging.info("Images file not found")
-
-    for i in range(10):
-        try:
-            logging.info("Pulling image %s", docker_image)
-            subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image}")
+    docker_image = get_image_with_version(reports_path, image_name)

     packages_path = os.path.join(temp_path, "packages")
     if not os.path.exists(packages_path):
         os.makedirs(packages_path)

-    download_all_deb_packages(check_name, reports_path, result_path)
+    download_all_deb_packages(check_name, reports_path, packages_path)

     server_log_path = os.path.join(temp_path, "server_log")
     if not os.path.exists(server_log_path):
@@ -15,6 +15,7 @@ from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from build_download_helper import download_all_deb_packages
 from upload_result_helper import upload_results
+from docker_pull_helper import get_images_with_versions

 DOWNLOAD_RETRIES_COUNT = 5

@@ -127,21 +128,8 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    images_path = os.path.join(temp_path, 'changed_images.json')
-    images_with_version = get_images_with_versions(images_path)
-    for image, version in images_with_version.items():
-        docker_image = image + ':' + version
-        for i in range(10):
-            try:
-                logging.info("Pulling image %s", docker_image)
-                subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
-                break
-            except Exception as ex:
-                time.sleep(i * 3)
-                logging.info("Got execption pulling docker %s", ex)
-        else:
-            raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image}")
-
+    images = get_images_with_versions(temp_path, IMAGES)
+    images_with_versions = {i.name: i.version for i in images}
     result_path = os.path.join(temp_path, "output_dir")
     if not os.path.exists(result_path):
         os.makedirs(result_path)
@@ -14,6 +14,7 @@ from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from build_download_helper import download_shared_build
 from upload_result_helper import upload_results
+from docker_pull_helper import get_image_with_version

 DOCKER_IMAGE = "clickhouse/split-build-smoke-test"
 DOWNLOAD_RETRIES_COUNT = 5
@@ -77,27 +78,7 @@ if __name__ == "__main__":
                 images_path = os.path.join(root, 'changed_images.json')
                 break

-    docker_image = DOCKER_IMAGE
-    if images_path and os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r', encoding='utf-8') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if docker_image in images:
-                docker_image += ':' + images[docker_image]
-    else:
-        logging.info("Images file not found")
-
-    for i in range(10):
-        try:
-            logging.info("Pulling image %s", docker_image)
-            subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image}")
+    docker_image = get_image_with_version(reports_path, DOCKER_IMAGE)

     packages_path = os.path.join(temp_path, "packages")
     if not os.path.exists(packages_path):
@@ -17,6 +17,7 @@ from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from build_download_helper import download_all_deb_packages
 from upload_result_helper import upload_results
+from docker_pull_helper import get_image_with_version

 def get_run_command(build_path, result_folder, server_log_folder, image):
     cmd = "docker run -e S3_URL='https://clickhouse-datasets.s3.amazonaws.com' " + \
@@ -85,35 +86,7 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    for root, _, files in os.walk(reports_path):
-        for f in files:
-            if f == 'changed_images.json':
-                images_path = os.path.join(root, 'changed_images.json')
-                break
-
-    image_name = "clickhouse/stress-test"
-
-    docker_image = image_name
-    if images_path and os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r', encoding='utf-8') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if image_name in images:
-                docker_image += ':' + images[image_name]
-    else:
-        logging.info("Images file not found")
-
-    for i in range(10):
-        try:
-            logging.info("Pulling image %s", docker_image)
-            subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image}")
+    docker_image = get_image_with_version(reports_path, 'clickhouse/stress-test')

     packages_path = os.path.join(temp_path, "packages")
     if not os.path.exists(packages_path):
@@ -10,6 +10,7 @@ from s3_helper import S3Helper
 from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
 from upload_result_helper import upload_results
+from docker_pull_helper import get_image_with_version

 NAME = "Style Check (actions)"

@@ -61,27 +62,7 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    images_path = os.path.join(temp_path, 'changed_images.json')
-    docker_image = 'clickhouse/style-test'
-    if os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if 'clickhouse/style-test' in images:
-                docker_image += ':' + images['clickhouse/style-test']
-
-    logging.info("Got docker image %s", docker_image)
-    for i in range(10):
-        try:
-            subprocess.check_output(f"docker pull {docker_image}", shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image {docker_image}")
-
+    docker_image = get_image_with_version(temp_path, 'clickhouse/style-test')
     s3_helper = S3Helper('https://s3.amazonaws.com')

     subprocess.check_output(f"docker run -u $(id -u ${{USER}}):$(id -g ${{USER}}) --cap-add=SYS_PTRACE --volume={repo_path}:/ClickHouse --volume={temp_path}:/test_output {docker_image}", shell=True)
@@ -3,7 +3,6 @@
 import logging
 import os
 import sys
 import time
 import subprocess
 import json

@@ -14,6 +13,7 @@ from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
 from build_download_helper import download_unit_tests
 from upload_result_helper import upload_results
+from docker_pull_helper import get_image_with_version

 IMAGE_NAME = 'clickhouse/unit-test'

@@ -107,33 +107,7 @@ if __name__ == "__main__":

     gh = Github(get_best_robot_token())

-    for root, _, files in os.walk(reports_path):
-        for f in files:
-            if f == 'changed_images.json':
-                images_path = os.path.join(root, 'changed_images.json')
-                break
-
-    docker_image = IMAGE_NAME
-    if images_path and os.path.exists(images_path):
-        logging.info("Images file exists")
-        with open(images_path, 'r', encoding='utf-8') as images_fd:
-            images = json.load(images_fd)
-            logging.info("Got images %s", images)
-            if IMAGE_NAME in images:
-                docker_image += ':' + images[IMAGE_NAME]
-    else:
-        logging.info("Images file not found")
-
-    for i in range(10):
-        try:
-            logging.info("Pulling image %s", docker_image)
-            subprocess.check_output(f"docker pull {docker_image}", stderr=subprocess.STDOUT, shell=True)
-            break
-        except Exception as ex:
-            time.sleep(i * 3)
-            logging.info("Got execption pulling docker %s", ex)
-    else:
-        raise Exception(f"Cannot pull dockerhub for image docker pull {docker_image}")
+    docker_image = get_image_with_version(reports_path, IMAGE_NAME)

     download_unit_tests(check_name, reports_path, temp_path)
     tests_binary_path = os.path.join(temp_path, "unit_tests_dbms")
@@ -36,7 +36,7 @@ def process_logs(s3_client, additional_logs, s3_path_prefix, test_results):

     return additional_urls

-def upload_results(s3_client, pr_number, commit_sha, test_results, additional_files, check_name):
+def upload_results(s3_client, pr_number, commit_sha, test_results, additional_files, check_name, with_raw_logs=True):
     s3_path_prefix = f"{pr_number}/{commit_sha}/" + check_name.lower().replace(' ', '_').replace('(', '_').replace(')', '_').replace(',', '_')
     additional_urls = process_logs(s3_client, additional_files, s3_path_prefix, test_results)

@@ -55,7 +55,7 @@ def upload_results(s3_client, pr_number, commit_sha, test_results, additional_files, check_name):
     else:
         raw_log_url = task_url

-    html_report = create_test_html_report(check_name, test_results, raw_log_url, task_url, branch_url, branch_name, commit_url, additional_urls, True)
+    html_report = create_test_html_report(check_name, test_results, raw_log_url, task_url, branch_url, branch_name, commit_url, additional_urls, with_raw_logs)
     with open('report.html', 'w', encoding='utf-8') as f:
         f.write(html_report)

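For orientation (not part of the commit): the last two hunks thread a new with_raw_logs keyword, defaulting to True, from upload_results into create_test_html_report, so existing call sites keep the previous behaviour while new callers can opt out. A toy sketch of that pattern with heavily trimmed, stand-in signatures (the real CI functions take many more arguments):

# Stand-ins only, to illustrate the defaulted keyword; not the real CI module functions.
def create_test_html_report(check_name, with_raw_logs):
    mode = "inline raw logs" if with_raw_logs else "linked raw logs"
    return f"<html>{check_name}: {mode}</html>"

def upload_results(check_name, with_raw_logs=True):
    # Defaulting the new keyword to True keeps every existing caller unchanged.
    return create_test_html_report(check_name, with_raw_logs)

print(upload_results("Docs Check (actions)"))                       # behaves as before
print(upload_results("Docs Check (actions)", with_raw_logs=False))  # new opt-out path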