Merge pull request #51561 from ClickHouse/ci-for-backports

Decoupled commits from #51180 for backports
commit ef55e4e94d
Mikhail f. Shiryaev, 2023-06-29 16:04:43 +02:00 (committed by GitHub)
4 changed files with 17 additions and 5 deletions

@@ -52,6 +52,8 @@ export CLICKHOUSE_TESTS_BASE_CONFIG_DIR=/clickhouse-config
export CLICKHOUSE_ODBC_BRIDGE_BINARY_PATH=/clickhouse-odbc-bridge
export CLICKHOUSE_LIBRARY_BRIDGE_BINARY_PATH=/clickhouse-library-bridge
export DOCKER_BASE_TAG=${DOCKER_BASE_TAG:=latest}
export DOCKER_HELPER_TAG=${DOCKER_HELPER_TAG:=latest}
export DOCKER_MYSQL_GOLANG_CLIENT_TAG=${DOCKER_MYSQL_GOLANG_CLIENT_TAG:=latest}
export DOCKER_DOTNET_CLIENT_TAG=${DOCKER_DOTNET_CLIENT_TAG:=latest}
export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
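
The exports above give each image tag an environment-driven default: the shell expansion ${DOCKER_HELPER_TAG:=latest} assigns "latest" only when the variable is unset, so CI can pin a concrete tag while local runs quietly fall back. A minimal sketch of the consuming side in Python, assuming only the standard library (helper_image_name is a hypothetical name for illustration; the real lookup appears in the _NetworkManager hunk further down):

    import os

    # Resolve the helper image reference the way the harness does after this
    # change: the tag comes from the environment (seeded by the exports
    # above) and defaults to "latest" when nothing is set.
    def helper_image_name() -> str:
        tag = os.getenv("DOCKER_HELPER_TAG", "latest")
        return "clickhouse/integration-helper:" + tag

    print(helper_image_name())  # clickhouse/integration-helper:latest unless overridden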

@@ -1,5 +1,5 @@
from pathlib import Path
from typing import Dict, List
from typing import Dict, List, Optional
import os
import logging
@@ -58,14 +58,19 @@ def upload_results(
test_results: TestResults,
additional_files: List[str],
check_name: str,
additional_urls: Optional[List[str]] = None,
) -> str:
normalized_check_name = check_name.lower()
for r in ((" ", "_"), ("(", "_"), (")", "_"), (",", "_"), ("/", "_")):
normalized_check_name = normalized_check_name.replace(*r)
# Preserve additional_urls to not modify the original one
original_additional_urls = additional_urls or []
s3_path_prefix = f"{pr_number}/{commit_sha}/{normalized_check_name}"
additional_urls = process_logs(
s3_client, additional_files, s3_path_prefix, test_results
)
additional_urls.extend(original_additional_urls)
branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
branch_name = "master"

@@ -231,6 +231,9 @@ class _NetworkManager:
def _ensure_container(self):
if self._container is None or self._container_expire_time <= time.time():
image_name = "clickhouse/integration-helper:" + os.getenv(
"DOCKER_HELPER_TAG", "latest"
)
for i in range(5):
if self._container is not None:
try:
@@ -247,7 +250,7 @@
time.sleep(i)
image = subprocess.check_output(
"docker images -q clickhouse/integration-helper 2>/dev/null", shell=True
f"docker images -q {image_name} 2>/dev/null", shell=True
)
if not image.strip():
print("No network image helper, will try download")
@@ -256,16 +259,16 @@
for i in range(5):
try:
subprocess.check_call( # STYLE_CHECK_ALLOW_SUBPROCESS_CHECK_CALL
"docker pull clickhouse/integration-helper", shell=True
f"docker pull {image_name}", shell=True
)
break
except:
time.sleep(i)
else:
raise Exception("Cannot pull clickhouse/integration-helper image")
raise Exception(f"Cannot pull {image_name} image")
self._container = self._docker_client.containers.run(
"clickhouse/integration-helper",
image_name,
auto_remove=True,
command=("sleep %s" % self.container_exit_timeout),
# /run/xtables.lock passed inside for correct iptables --wait
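
With this change _ensure_container derives the helper image once from DOCKER_HELPER_TAG and reuses it for the local-image check, the pull retry loop, and containers.run. A condensed sketch of just the pull logic, assuming only a docker CLI on PATH (pull_helper_image is a hypothetical name for illustration):

    import os
    import subprocess
    import time

    # Hypothetical condensation of _NetworkManager._ensure_container's pull
    # logic: build the image reference from DOCKER_HELPER_TAG, then retry
    # the pull up to five times with a linearly growing sleep.
    def pull_helper_image() -> str:
        image_name = "clickhouse/integration-helper:" + os.getenv("DOCKER_HELPER_TAG", "latest")
        for i in range(5):
            try:
                subprocess.check_call(f"docker pull {image_name}", shell=True)
                break
            except subprocess.CalledProcessError:
                time.sleep(i)  # 0, 1, 2, 3, 4 seconds between attempts
        else:
            # for/else: only reached when every attempt failed
            raise Exception(f"Cannot pull {image_name} image")
        return image_name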

@@ -336,6 +336,8 @@ if __name__ == "__main__":
env_tags += "-e {}={} ".format("DOCKER_MYSQL_PHP_CLIENT_TAG", tag)
elif image == "clickhouse/postgresql-java-client":
env_tags += "-e {}={} ".format("DOCKER_POSTGRESQL_JAVA_CLIENT_TAG", tag)
elif image == "clickhouse/integration-helper":
env_tags += "-e {}={} ".format("DOCKER_HELPER_TAG", tag)
elif image == "clickhouse/integration-test":
env_tags += "-e {}={} ".format("DOCKER_BASE_TAG", tag)
elif image == "clickhouse/kerberized-hadoop":