From 4b2bba2ff1aa9a878335e3c975e648992fe76400 Mon Sep 17 00:00:00 2001
From: "Mikhail f. Shiryaev"
Date: Wed, 10 Aug 2022 14:37:06 +0200
Subject: [PATCH 1/2] Do not upload unnecessary lambda sources

---
 .../build_and_deploy_archive.sh | 22 ++++++++++---------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/tests/ci/team_keys_lambda/build_and_deploy_archive.sh b/tests/ci/team_keys_lambda/build_and_deploy_archive.sh
index cd5b0d26e3f..defa400453f 100755
--- a/tests/ci/team_keys_lambda/build_and_deploy_archive.sh
+++ b/tests/ci/team_keys_lambda/build_and_deploy_archive.sh
@@ -7,18 +7,20 @@ cd "$WORKDIR"
 PY_EXEC=python3.9
 LAMBDA_NAME=$(basename "$PWD")
 LAMBDA_NAME=${LAMBDA_NAME//_/-}
-VENV=lambda-venv
-rm -rf "$VENV" lambda-package.zip
-"$PY_EXEC" -m venv "$VENV"
-#virtualenv "$VENV"
-# shellcheck disable=SC1091
-source "$VENV/bin/activate"
-pip install -r requirements.txt
 PACKAGE=lambda-package
 rm -rf "$PACKAGE" "$PACKAGE".zip
-cp -r "$VENV/lib/$PY_EXEC/site-packages" "$PACKAGE"
+mkdir "$PACKAGE"
 cp app.py "$PACKAGE"
-rm -r "$PACKAGE"/{pip,pip-*,setuptools,setuptools-*}
-( cd "$PACKAGE" && zip -r ../"$PACKAGE".zip . )
+if [ -f requirements.txt ]; then
+  VENV=lambda-venv
+  rm -rf "$VENV" lambda-package.zip
+  "$PY_EXEC" -m venv "$VENV"
+  # shellcheck disable=SC1091
+  source "$VENV/bin/activate"
+  pip install -r requirements.txt
+  cp -rT "$VENV/lib/$PY_EXEC/site-packages/" "$PACKAGE"
+  rm -r "$PACKAGE"/{pip,pip-*,setuptools,setuptools-*}
+fi
+( cd "$PACKAGE" && zip -9 -r ../"$PACKAGE".zip . )
 
 aws lambda update-function-code --function-name "$LAMBDA_NAME" --zip-file fileb://"$PACKAGE".zip
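
A note on the packaging hunk above: since lambda-package now exists before the
dependencies are copied into it (it is created with mkdir and receives app.py
first), a plain "cp -r" would nest a site-packages subdirectory inside it;
"cp -rT" (GNU --no-target-directory) merges the contents of site-packages into
the directory instead, and "zip -9" selects maximum compression. A minimal
sketch of the -T difference, on throwaway paths:

    mkdir -p demo/src demo/dst && touch demo/src/module.py
    cp -r  demo/src demo/dst   # nested copy: demo/dst/src/module.py
    cp -rT demo/src demo/dst   # merged copy: demo/dst/module.py

With the new "if [ -f requirements.txt ]" guard, lambdas without third-party
dependencies package only app.py, which is what keeps the unnecessary sources
out of the uploaded archive.
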
From 2fb68078e828256218c2bdf994b766de7f1a7185 Mon Sep 17 00:00:00 2001
From: "Mikhail f. Shiryaev"
Date: Wed, 10 Aug 2022 15:22:04 +0200
Subject: [PATCH 2/2] Replace S3 URLs by parameter

---
 tests/ci/ast_fuzzer_check.py             |  3 ++-
 tests/ci/build_check.py                  |  9 ++++-----
 tests/ci/build_report_check.py           |  3 ++-
 tests/ci/ccache_utils.py                 |  5 +++--
 tests/ci/codebrowser_check.py            | 11 +++++++----
 tests/ci/compatibility_check.py          |  4 ++--
 tests/ci/docker_images_check.py          |  4 ++--
 tests/ci/docker_manifests_merge.py       |  4 ++--
 tests/ci/docker_server.py                |  7 +++----
 tests/ci/docs_check.py                   |  4 ++--
 tests/ci/docs_release.py                 |  4 ++--
 tests/ci/env_helper.py                   |  1 +
 tests/ci/fast_test_check.py              |  4 ++--
 tests/ci/functional_test_check.py        |  6 +++---
 tests/ci/integration_test_check.py       |  4 ++--
 tests/ci/keeper_jepsen_check.py          |  6 +++---
 tests/ci/performance_comparison_check.py |  6 +++---
 tests/ci/push_to_artifactory.py          |  4 ++--
 tests/ci/s3_helper.py                    | 15 ++++-----------
 tests/ci/split_build_smoke_check.py      |  4 ++--
 tests/ci/stress_check.py                 |  6 +++---
 tests/ci/style_check.py                  |  4 ++--
 tests/ci/unit_tests_check.py             |  4 ++--
 23 files changed, 60 insertions(+), 62 deletions(-)

diff --git a/tests/ci/ast_fuzzer_check.py b/tests/ci/ast_fuzzer_check.py
index 918e27a4e11..82e7a3271c1 100644
--- a/tests/ci/ast_fuzzer_check.py
+++ b/tests/ci/ast_fuzzer_check.py
@@ -12,6 +12,7 @@ from env_helper import (
     GITHUB_RUN_URL,
     REPORTS_PATH,
     REPO_COPY,
+    S3_URL,
     TEMP_PATH,
 )
 from s3_helper import S3Helper
@@ -117,7 +118,7 @@ if __name__ == "__main__":
         "core.gz": os.path.join(workspace_path, "core.gz"),
     }
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
     for f in paths:
         try:
             paths[f] = s3_helper.upload_test_report_to_s3(paths[f], s3_prefix + "/" + f)
diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py
index 488fd1bbb34..2463ec669dd 100644
--- a/tests/ci/build_check.py
+++ b/tests/ci/build_check.py
@@ -15,6 +15,7 @@ from env_helper import (
     IMAGES_PATH,
     REPO_COPY,
     S3_BUILDS_BUCKET,
+    S3_URL,
     TEMP_PATH,
 )
 from s3_helper import S3Helper
@@ -142,11 +143,9 @@ def check_for_success_run(
     for url in build_results:
         url_escaped = url.replace("+", "%2B").replace(" ", "%20")
         if BUILD_LOG_NAME in url:
-            log_url = f"https://s3.amazonaws.com/{S3_BUILDS_BUCKET}/{url_escaped}"
+            log_url = f"{S3_URL}/{S3_BUILDS_BUCKET}/{url_escaped}"
         else:
-            build_urls.append(
-                f"https://s3.amazonaws.com/{S3_BUILDS_BUCKET}/{url_escaped}"
-            )
+            build_urls.append(f"{S3_URL}/{S3_BUILDS_BUCKET}/{url_escaped}")
     if not log_url:
         # log is uploaded the last, so if there's no log we need to rerun the build
         return
@@ -250,7 +249,7 @@ def main():
 
     logging.info("Repo copy path %s", REPO_COPY)
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
 
     version = get_version_from_repo(git=Git(True))
     release_or_pr, performance_pr = get_release_or_pr(pr_info, version)
diff --git a/tests/ci/build_report_check.py b/tests/ci/build_report_check.py
index f1f92cded1d..3155b4fd56d 100644
--- a/tests/ci/build_report_check.py
+++ b/tests/ci/build_report_check.py
@@ -14,6 +14,7 @@ from env_helper import (
     GITHUB_RUN_URL,
     GITHUB_SERVER_URL,
     REPORTS_PATH,
+    S3_URL,
     TEMP_PATH,
 )
 from report import create_build_html_report
@@ -244,7 +245,7 @@ def main():
         logging.error("No success builds, failing check")
         sys.exit(1)
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
 
     branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
     branch_name = "master"
diff --git a/tests/ci/ccache_utils.py b/tests/ci/ccache_utils.py
index bd155b02cb4..fd3589e1bb3 100644
--- a/tests/ci/ccache_utils.py
+++ b/tests/ci/ccache_utils.py
@@ -7,9 +7,10 @@ import os
 import shutil
 from pathlib import Path
 
-import requests
+import requests  # type: ignore
 
 from compress_files import decompress_fast, compress_fast
+from env_helper import S3_URL, S3_BUILDS_BUCKET
 
 DOWNLOAD_RETRIES_COUNT = 5
 
@@ -73,7 +74,7 @@ def get_ccache_if_not_exists(
     for obj in objects:
         if ccache_name in obj:
             logging.info("Found ccache on path %s", obj)
-            url = "https://s3.amazonaws.com/clickhouse-builds/" + obj
+            url = f"{S3_URL}/{S3_BUILDS_BUCKET}/{obj}"
             compressed_cache = os.path.join(temp_path, os.path.basename(obj))
             dowload_file_with_progress(url, compressed_cache)
 
"clickhouse-test-reports" ) - index_html = 'HTML report' + index_html = ( + '' + "HTML report" + ) test_results = [(index_html, "Look at the report")] diff --git a/tests/ci/compatibility_check.py b/tests/ci/compatibility_check.py index 2a1b9716189..fc7584536ef 100644 --- a/tests/ci/compatibility_check.py +++ b/tests/ci/compatibility_check.py @@ -8,7 +8,7 @@ import sys from github import Github -from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH +from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL from s3_helper import S3Helper from get_robot_token import get_best_robot_token from pr_info import PRInfo @@ -169,7 +169,7 @@ if __name__ == "__main__": subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True) - s3_helper = S3Helper("https://s3.amazonaws.com") + s3_helper = S3Helper(S3_URL) state, description, test_results, additional_logs = process_result( result_path, server_log_path ) diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py index 8b838defa8b..76ebbb78c7b 100644 --- a/tests/ci/docker_images_check.py +++ b/tests/ci/docker_images_check.py @@ -14,7 +14,7 @@ from github import Github from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse from commit_status_helper import post_commit_status -from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP, GITHUB_RUN_URL +from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP, GITHUB_RUN_URL, S3_URL from get_robot_token import get_best_robot_token, get_parameter_from_ssm from pr_info import PRInfo from s3_helper import S3Helper @@ -460,7 +460,7 @@ def main(): with open(changed_json, "w", encoding="utf-8") as images_file: json.dump(result_images, images_file) - s3_helper = S3Helper("https://s3.amazonaws.com") + s3_helper = S3Helper(S3_URL) s3_path_prefix = ( str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_") diff --git a/tests/ci/docker_manifests_merge.py b/tests/ci/docker_manifests_merge.py index 00ab0b9e77f..78f236be786 100644 --- a/tests/ci/docker_manifests_merge.py +++ b/tests/ci/docker_manifests_merge.py @@ -11,7 +11,7 @@ from github import Github from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse from commit_status_helper import post_commit_status -from env_helper import RUNNER_TEMP +from env_helper import RUNNER_TEMP, S3_URL from get_robot_token import get_best_robot_token, get_parameter_from_ssm from pr_info import PRInfo from s3_helper import S3Helper @@ -203,7 +203,7 @@ def main(): json.dump(changed_images, ci) pr_info = PRInfo() - s3_helper = S3Helper("https://s3.amazonaws.com") + s3_helper = S3Helper(S3_URL) url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME) diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py index 09a75206442..64172b90ebc 100644 --- a/tests/ci/docker_server.py +++ b/tests/ci/docker_server.py @@ -16,7 +16,7 @@ from build_check import get_release_or_pr from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse from commit_status_helper import post_commit_status from docker_images_check import DockerImage -from env_helper import CI, GITHUB_RUN_URL, RUNNER_TEMP, S3_BUILDS_BUCKET +from env_helper import CI, GITHUB_RUN_URL, RUNNER_TEMP, S3_BUILDS_BUCKET, S3_URL from get_robot_token import get_best_robot_token, get_parameter_from_ssm from git_helper import Git from pr_info import PRInfo @@ -309,8 +309,7 @@ def main(): pr_info = PRInfo() release_or_pr, _ = get_release_or_pr(pr_info, args.version) 
diff --git a/tests/ci/docs_check.py b/tests/ci/docs_check.py
index cf4fd8da692..f260d1f1e50 100644
--- a/tests/ci/docs_check.py
+++ b/tests/ci/docs_check.py
@@ -6,7 +6,7 @@ import os
 import sys
 from github import Github
 
-from env_helper import TEMP_PATH, REPO_COPY
+from env_helper import TEMP_PATH, REPO_COPY, S3_URL
 from s3_helper import S3Helper
 from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
@@ -120,7 +120,7 @@ if __name__ == "__main__":
     else:
         lines.append(("Non zero exit code", "FAIL"))
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
 
     ch_helper = ClickHouseHelper()
     report_url = upload_results(
diff --git a/tests/ci/docs_release.py b/tests/ci/docs_release.py
index 35203486fae..96b0e7048c6 100644
--- a/tests/ci/docs_release.py
+++ b/tests/ci/docs_release.py
@@ -7,7 +7,7 @@ import sys
 
 from github import Github
 
-from env_helper import TEMP_PATH, REPO_COPY, CLOUDFLARE_TOKEN
+from env_helper import TEMP_PATH, REPO_COPY, CLOUDFLARE_TOKEN, S3_URL
 from s3_helper import S3Helper
 from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
@@ -106,7 +106,7 @@ if __name__ == "__main__":
     else:
         lines.append(("Non zero exit code", "FAIL"))
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
 
     report_url = upload_results(
         s3_helper, pr_info.number, pr_info.sha, lines, additional_files, NAME
diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py
index b37d38763be..b6541205ed3 100644
--- a/tests/ci/env_helper.py
+++ b/tests/ci/env_helper.py
@@ -22,6 +22,7 @@ IMAGES_PATH = os.getenv("IMAGES_PATH", TEMP_PATH)
 REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports")))
 REPO_COPY = os.getenv("REPO_COPY", git_root)
 RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp")))
+S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com")
 S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds")
 S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports")
 
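
The env_helper.py hunk above is the pivot of the series: every hard-coded
https://s3.amazonaws.com now flows through one overridable default, so the
remaining files only need the import. A sketch of the intended effect (the
endpoint and bucket below are hypothetical examples, not anything these
patches configure):

    # Default behaviour, same URLs as before the patch:
    python3 tests/ci/docs_check.py

    # Hypothetical override pointing the CI scripts at an S3-compatible store:
    S3_URL=https://minio.internal.example S3_BUILDS_BUCKET=ci-builds \
        python3 tests/ci/docs_check.py
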
diff --git a/tests/ci/fast_test_check.py b/tests/ci/fast_test_check.py
index 9852175ca92..84d9d3f16d8 100644
--- a/tests/ci/fast_test_check.py
+++ b/tests/ci/fast_test_check.py
@@ -9,7 +9,7 @@ import atexit
 
 from github import Github
 
-from env_helper import CACHES_PATH, TEMP_PATH
+from env_helper import CACHES_PATH, TEMP_PATH, S3_URL
 from pr_info import FORCE_TESTS_LABEL, PRInfo
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
@@ -105,7 +105,7 @@ if __name__ == "__main__":
 
     docker_image = get_image_with_version(temp_path, "clickhouse/fasttest")
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
 
     workspace = os.path.join(temp_path, "fasttest-workspace")
     if not os.path.exists(workspace):
diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py
index 690ac3c1851..bcfeaa9973a 100644
--- a/tests/ci/functional_test_check.py
+++ b/tests/ci/functional_test_check.py
@@ -10,7 +10,7 @@ import atexit
 
 from github import Github
 
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from pr_info import FORCE_TESTS_LABEL, PRInfo
@@ -88,7 +88,7 @@ def get_run_command(
 
     envs = [
         f"-e MAX_RUN_TIME={int(0.9 * kill_timeout)}",
-        '-e S3_URL="https://clickhouse-datasets.s3.amazonaws.com"',
+        f'-e S3_URL="{S3_URL}/clickhouse-datasets"',
     ]
 
     if flaky_check:
@@ -314,7 +314,7 @@ if __name__ == "__main__":
 
        subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
 
     state, description, test_results, additional_logs = process_results(
         result_path, server_log_path
diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py
index 565864d576c..49a95748f6c 100644
--- a/tests/ci/integration_test_check.py
+++ b/tests/ci/integration_test_check.py
@@ -10,7 +10,7 @@ import sys
 
 from github import Github
 
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
@@ -249,7 +249,7 @@ if __name__ == "__main__":
     ch_helper = ClickHouseHelper()
     mark_flaky_tests(ch_helper, check_name, test_results)
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
     report_url = upload_results(
         s3_helper,
         pr_info.number,
diff --git a/tests/ci/keeper_jepsen_check.py b/tests/ci/keeper_jepsen_check.py
index 88ccf8e8828..af44b87b897 100644
--- a/tests/ci/keeper_jepsen_check.py
+++ b/tests/ci/keeper_jepsen_check.py
@@ -9,7 +9,7 @@ import boto3
 from github import Github
 import requests
 
-from env_helper import REPO_COPY, TEMP_PATH
+from env_helper import REPO_COPY, TEMP_PATH, S3_BUILDS_BUCKET, S3_URL
 from stopwatch import Stopwatch
 from upload_result_helper import upload_results
 from s3_helper import S3Helper
@@ -192,7 +192,7 @@ if __name__ == "__main__":
     # run (see .github/workflows/jepsen.yml) So we cannot add explicit
     # dependency on a build job and using busy loop on it's results. For the
     # same reason we are using latest docker image.
-    build_url = f"https://s3.amazonaws.com/clickhouse-builds/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
+    build_url = f"{S3_URL}/{S3_BUILDS_BUCKET}/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
     head = requests.head(build_url)
     counter = 0
     while head.status_code != 200:
@@ -248,7 +248,7 @@ if __name__ == "__main__":
         description = "No Jepsen output log"
         test_result = [("No Jepsen output log", "FAIL")]
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
     report_url = upload_results(
         s3_helper,
         pr_info.number,
diff --git a/tests/ci/performance_comparison_check.py b/tests/ci/performance_comparison_check.py
index 57a52dcaa6a..ce5226aeb04 100644
--- a/tests/ci/performance_comparison_check.py
+++ b/tests/ci/performance_comparison_check.py
@@ -15,7 +15,7 @@ from github import Github
 from commit_status_helper import get_commit, post_commit_status
 from ci_config import CI_CONFIG
 from docker_pull_helper import get_image_with_version
-from env_helper import GITHUB_EVENT_PATH, GITHUB_RUN_URL
+from env_helper import GITHUB_EVENT_PATH, GITHUB_RUN_URL, S3_BUILDS_BUCKET, S3_URL
 from get_robot_token import get_best_robot_token, get_parameter_from_ssm
 from pr_info import PRInfo
 from rerun_helper import RerunHelper
@@ -86,7 +86,7 @@ if __name__ == "__main__":
 
     docker_env = ""
 
-    docker_env += " -e S3_URL=https://s3.amazonaws.com/clickhouse-builds"
+    docker_env += f" -e S3_URL={S3_URL}/{S3_BUILDS_BUCKET}"
     docker_env += f" -e BUILD_NAME={required_build}"
 
     if pr_info.number == 0:
@@ -197,7 +197,7 @@ if __name__ == "__main__":
     }
 
     s3_prefix = f"{pr_info.number}/{pr_info.sha}/{check_name_prefix}/"
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
     uploaded = {}  # type: Dict[str, str]
     for name, path in paths.items():
         try:
diff --git a/tests/ci/push_to_artifactory.py b/tests/ci/push_to_artifactory.py
index 98de315ddae..b04fa723580 100755
--- a/tests/ci/push_to_artifactory.py
+++ b/tests/ci/push_to_artifactory.py
@@ -9,7 +9,7 @@ from typing import Dict, List, Tuple
 
 from artifactory import ArtifactorySaaSPath  # type: ignore
 from build_download_helper import dowload_build_with_progress
-from env_helper import RUNNER_TEMP, S3_BUILDS_BUCKET
+from env_helper import RUNNER_TEMP, S3_BUILDS_BUCKET, S3_URL
 from git_helper import TAG_REGEXP, commit, removeprefix, removesuffix
 
 
@@ -98,7 +98,7 @@ class Packages:
 
 class S3:
     template = (
-        "https://s3.amazonaws.com/"
+        f"{S3_URL}/"
         # "clickhouse-builds/"
         f"{S3_BUILDS_BUCKET}/"
         # "33333/" or "21.11/" from --release, if pull request is omitted
Meta: %s", file_path, s3_path, metadata) # last two replacements are specifics of AWS urls: # https://jamesd3142.wordpress.com/2018/02/28/amazon-s3-and-the-plus-symbol/ - return ( - "https://s3.amazonaws.com/{bucket}/{path}".format( - bucket=bucket_name, path=s3_path - ) - .replace("+", "%2B") - .replace(" ", "%20") - ) + url = f"{S3_URL}/{bucket_name}/{s3_path}" + return url.replace("+", "%2B").replace(" ", "%20") def upload_test_report_to_s3(self, file_path, s3_path): if CI: @@ -175,9 +170,7 @@ class S3Helper: t = time.time() except Exception as ex: logging.critical("Failed to upload file, expcetion %s", ex) - return "https://s3.amazonaws.com/{bucket}/{path}".format( - bucket=bucket_name, path=s3_path - ) + return f"{S3_URL}/{bucket_name}/{s3_path}" p = Pool(256) diff --git a/tests/ci/split_build_smoke_check.py b/tests/ci/split_build_smoke_check.py index 87a528d2761..5052b6b362e 100644 --- a/tests/ci/split_build_smoke_check.py +++ b/tests/ci/split_build_smoke_check.py @@ -7,7 +7,7 @@ import sys from github import Github -from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH +from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL from s3_helper import S3Helper from get_robot_token import get_best_robot_token from pr_info import PRInfo @@ -126,7 +126,7 @@ if __name__ == "__main__": ) ch_helper = ClickHouseHelper() - s3_helper = S3Helper("https://s3.amazonaws.com") + s3_helper = S3Helper(S3_URL) report_url = upload_results( s3_helper, pr_info.number, diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py index e63f66e2e50..6073b03f8a6 100644 --- a/tests/ci/stress_check.py +++ b/tests/ci/stress_check.py @@ -8,7 +8,7 @@ import sys from github import Github -from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH +from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL from s3_helper import S3Helper from get_robot_token import get_best_robot_token from pr_info import PRInfo @@ -31,7 +31,7 @@ def get_run_command( ): cmd = ( "docker run --cap-add=SYS_PTRACE " - "-e S3_URL='https://clickhouse-datasets.s3.amazonaws.com' " + f"-e S3_URL='{S3_URL}/clickhouse-datasets' " f"--volume={build_path}:/package_folder " f"--volume={result_folder}:/test_output " f"--volume={repo_tests_path}:/usr/share/clickhouse-test " @@ -148,7 +148,7 @@ if __name__ == "__main__": subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True) - s3_helper = S3Helper("https://s3.amazonaws.com") + s3_helper = S3Helper(S3_URL) state, description, test_results, additional_logs = process_results( result_path, server_log_path, run_log_path ) diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py index 66837ccb84e..db286ec7f6c 100644 --- a/tests/ci/style_check.py +++ b/tests/ci/style_check.py @@ -15,7 +15,7 @@ from clickhouse_helper import ( ) from commit_status_helper import post_commit_status, update_mergeable_check from docker_pull_helper import get_image_with_version -from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP +from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP, S3_URL from get_robot_token import get_best_robot_token from github_helper import GitHub from git_helper import git_runner @@ -166,7 +166,7 @@ if __name__ == "__main__": os.makedirs(temp_path) docker_image = get_image_with_version(temp_path, "clickhouse/style-test") - s3_helper = S3Helper("https://s3.amazonaws.com") + s3_helper = S3Helper(S3_URL) cmd = ( f"docker run -u $(id -u ${{USER}}):$(id -g ${{USER}}) --cap-add=SYS_PTRACE " diff --git a/tests/ci/unit_tests_check.py 
diff --git a/tests/ci/split_build_smoke_check.py b/tests/ci/split_build_smoke_check.py
index 87a528d2761..5052b6b362e 100644
--- a/tests/ci/split_build_smoke_check.py
+++ b/tests/ci/split_build_smoke_check.py
@@ -7,7 +7,7 @@ import sys
 
 from github import Github
 
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
@@ -126,7 +126,7 @@ if __name__ == "__main__":
     )
 
     ch_helper = ClickHouseHelper()
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
     report_url = upload_results(
         s3_helper,
         pr_info.number,
diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py
index e63f66e2e50..6073b03f8a6 100644
--- a/tests/ci/stress_check.py
+++ b/tests/ci/stress_check.py
@@ -8,7 +8,7 @@ import sys
 
 from github import Github
 
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
@@ -31,7 +31,7 @@ def get_run_command(
 ):
     cmd = (
         "docker run --cap-add=SYS_PTRACE "
-        "-e S3_URL='https://clickhouse-datasets.s3.amazonaws.com' "
+        f"-e S3_URL='{S3_URL}/clickhouse-datasets' "
         f"--volume={build_path}:/package_folder "
         f"--volume={result_folder}:/test_output "
         f"--volume={repo_tests_path}:/usr/share/clickhouse-test "
@@ -148,7 +148,7 @@ if __name__ == "__main__":
 
     subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
     state, description, test_results, additional_logs = process_results(
         result_path, server_log_path, run_log_path
    )
diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py
index 66837ccb84e..db286ec7f6c 100644
--- a/tests/ci/style_check.py
+++ b/tests/ci/style_check.py
@@ -15,7 +15,7 @@ from clickhouse_helper import (
 )
 from commit_status_helper import post_commit_status, update_mergeable_check
 from docker_pull_helper import get_image_with_version
-from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP
+from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP, S3_URL
 from get_robot_token import get_best_robot_token
 from github_helper import GitHub
 from git_helper import git_runner
@@ -166,7 +166,7 @@ if __name__ == "__main__":
         os.makedirs(temp_path)
 
     docker_image = get_image_with_version(temp_path, "clickhouse/style-test")
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
 
     cmd = (
         f"docker run -u $(id -u ${{USER}}):$(id -g ${{USER}}) --cap-add=SYS_PTRACE "
diff --git a/tests/ci/unit_tests_check.py b/tests/ci/unit_tests_check.py
index 4441709cb7b..95011b728e9 100644
--- a/tests/ci/unit_tests_check.py
+++ b/tests/ci/unit_tests_check.py
@@ -7,7 +7,7 @@ import subprocess
 
 from github import Github
 
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, S3_URL
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
 from pr_info import PRInfo
@@ -147,7 +147,7 @@ if __name__ == "__main__":
 
     subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
 
-    s3_helper = S3Helper("https://s3.amazonaws.com")
+    s3_helper = S3Helper(S3_URL)
     state, description, test_results, additional_logs = process_result(test_output)
 
     ch_helper = ClickHouseHelper()
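
Assuming the default file names that "git format-patch" derives from the
subject lines, the series applies in order with:

    git am 0001-Do-not-upload-unnecessary-lambda-sources.patch \
        0002-Replace-S3-URLs-by-parameter.patch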