Don't use envs for static links

Mikhail f. Shiryaev 2022-08-12 09:46:36 +02:00
parent 372481e770
commit b46622cf3c
2 changed files with 6 additions and 4 deletions


@@ -10,7 +10,7 @@ import atexit

 from github import Github

-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_DOWNLOAD
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from pr_info import FORCE_TESTS_LABEL, PRInfo
@@ -88,7 +88,8 @@ def get_run_command(
     envs = [
         f"-e MAX_RUN_TIME={int(0.9 * kill_timeout)}",
-        f'-e S3_URL="{S3_DOWNLOAD}/clickhouse-datasets"',
+        # a static link, don't use S3_URL or S3_DOWNLOAD
+        '-e S3_URL="https://s3.amazonaws.com/clickhouse-datasets"',
     ]

     if flaky_check:
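For illustration, a minimal sketch of how an envs list like the one above can be joined into a docker run invocation. The function name, the kill_timeout/result_path/image parameters, and the final command layout are assumptions for this example, not the repository's actual code; only the two "-e" entries come from the diff.

# Hypothetical sketch, not the real get_run_command: shows how "-e" flags
# (including the hard-coded S3_URL) can be joined into a docker command string.
def build_run_command_sketch(kill_timeout: int, result_path: str, image: str) -> str:
    envs = [
        f"-e MAX_RUN_TIME={int(0.9 * kill_timeout)}",
        # a static link, don't use S3_URL or S3_DOWNLOAD
        '-e S3_URL="https://s3.amazonaws.com/clickhouse-datasets"',
    ]
    env_str = " ".join(envs)
    return f"docker run {env_str} --volume={result_path}:/test_output {image}"


# Example usage; the image name is an assumed placeholder.
print(build_run_command_sketch(90 * 60, "/tmp/test_output", "some/test-image"))

The point of the change is that a hard-coded public URL does not need to be built from the environment, so the S3_DOWNLOAD import can be dropped.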


@@ -8,7 +8,7 @@ import sys

 from github import Github

-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_DOWNLOAD
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
@@ -31,7 +31,8 @@ def get_run_command(
 ):
     cmd = (
         "docker run --cap-add=SYS_PTRACE "
-        f"-e S3_URL='{S3_DOWNLOAD}/clickhouse-datasets' "
+        # a static link, don't use S3_URL or S3_DOWNLOAD
+        "-e S3_URL='https://s3.amazonaws.com/clickhouse-datasets' "
         f"--volume={build_path}:/package_folder "
         f"--volume={result_folder}:/test_output "
         f"--volume={repo_tests_path}:/usr/share/clickhouse-test "