Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-12-18 04:12:19 +00:00)
Merge pull request #43132 from ClickHouse/strict-typing-ci
Enable strict typing check in tests/ci
Commit 8897aa7fe8
@@ -17,7 +17,7 @@ RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
     python3-pip \
     shellcheck \
     yamllint \
-    && pip3 install black==22.8.0 boto3 codespell==2.2.1 dohq-artifactory PyGithub unidiff pylint==2.6.2 \
+    && pip3 install black==22.8.0 boto3 codespell==2.2.1 dohq-artifactory mypy PyGithub unidiff pylint==2.6.2 \
    && apt-get clean \
    && rm -rf /root/.cache/pip

@@ -11,17 +11,19 @@ def process_result(result_folder):
     description = ""
     test_results = []
     checks = (
-        ("header duplicates", "duplicate_output.txt"),
-        ("shellcheck", "shellcheck_output.txt"),
-        ("style", "style_output.txt"),
-        ("black", "black_output.txt"),
-        ("typos", "typos_output.txt"),
-        ("whitespaces", "whitespaces_output.txt"),
-        ("workflows", "workflows_output.txt"),
-        ("doc typos", "doc_spell_output.txt"),
+        "duplicate includes",
+        "shellcheck",
+        "style",
+        "black",
+        "mypy",
+        "typos",
+        "whitespaces",
+        "workflows",
+        "docs spelling",
     )

-    for name, out_file in checks:
+    for name in checks:
+        out_file = name.replace(" ", "_") + "_output.txt"
         full_path = os.path.join(result_folder, out_file)
         if not os.path.exists(full_path):
             logging.info("No %s check log on path %s", name, full_path)
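The checks list above collapses from (name, file) tuples to bare names because every output log now follows one naming convention. A quick illustration of the derivation (plain Python, runnable as-is):

    checks = ("duplicate includes", "mypy", "docs spelling")
    for name in checks:
        out_file = name.replace(" ", "_") + "_output.txt"
        print(name, "->", out_file)
    # duplicate includes -> duplicate_includes_output.txt
    # mypy -> mypy_output.txt
    # docs spelling -> docs_spelling_output.txt

This is also why the shell script below renames duplicate_output.txt to duplicate_includes_output.txt and doc_spell_output.txt to docs_spelling_output.txt.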
@@ -4,15 +4,17 @@

 cd /ClickHouse/utils/check-style || echo -e "failure\tRepo not found" > /test_output/check_status.tsv
 echo "Check duplicates" | ts
-./check-duplicate-includes.sh |& tee /test_output/duplicate_output.txt
+./check-duplicate-includes.sh |& tee /test_output/duplicate_includes_output.txt
 echo "Check style" | ts
 ./check-style -n |& tee /test_output/style_output.txt
 echo "Check python formatting with black" | ts
 ./check-black -n |& tee /test_output/black_output.txt
+echo "Check python type hinting with mypy" | ts
+./check-mypy -n |& tee /test_output/mypy_output.txt
 echo "Check typos" | ts
 ./check-typos |& tee /test_output/typos_output.txt
 echo "Check docs spelling" | ts
-./check-doc-aspell |& tee /test_output/doc_spell_output.txt
+./check-doc-aspell |& tee /test_output/docs_spelling_output.txt
 echo "Check whitespaces" | ts
 ./check-whitespaces -n |& tee /test_output/whitespaces_output.txt
 echo "Check workflows" | ts
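The new `./check-mypy -n` call runs from /ClickHouse/utils/check-style; the script itself is not part of this diff. A hypothetical minimal equivalent, just to show the shape of such a wrapper (the config path and flags here are assumptions, not the real script):

    #!/usr/bin/env python3
    # Hypothetical sketch of a check-mypy wrapper; NOT the actual
    # utils/check-style/check-mypy script, which this diff does not show.
    import subprocess
    import sys

    def main() -> int:
        cmd = ["python3", "-m", "mypy", "--config-file", "tests/ci/.mypy.ini", "tests/ci"]
        proc = subprocess.run(cmd, capture_output=True, text=True, check=False)
        sys.stdout.write(proc.stdout)
        sys.stderr.write(proc.stderr)
        return proc.returncode

    if __name__ == "__main__":
        sys.exit(main())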
tests/ci/.mypy.ini (new file, 16 lines)
@@ -0,0 +1,16 @@
+[mypy]
+warn_no_return = False
+warn_unused_configs = True
+disallow_subclassing_any = True
+disallow_untyped_calls = False
+disallow_untyped_defs = False
+disallow_incomplete_defs = True
+check_untyped_defs = True
+disallow_untyped_decorators = True
+no_implicit_optional = True
+warn_redundant_casts = True
+warn_unused_ignores = True
+warn_return_any = True
+no_implicit_reexport = True
+strict_equality = True
+strict_concatenate = True
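Two settings in this config explain most of the churn below: disallow_untyped_defs stays False, so fully unannotated functions are still allowed, but disallow_incomplete_defs = True means any function that annotates one parameter must annotate everything, including the return type. Hence the many `) -> None:` additions throughout the diff. A minimal illustration (not from the PR):

    def bad(x: int, y):  # mypy error: incomplete definitions are disallowed
        return x

    def still_fine(x, y):  # fully untyped defs remain allowed
        return x

    def good(x: int, y: int) -> int:
        return x + y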
@@ -121,7 +121,7 @@ def check_for_success_run(
     s3_prefix: str,
     build_name: str,
     build_config: BuildConfig,
-):
+) -> None:
     logged_prefix = os.path.join(S3_BUILDS_BUCKET, s3_prefix)
     logging.info("Checking for artifacts in %s", logged_prefix)
     try:

@@ -174,7 +174,7 @@ def create_json_artifact(
     build_config: BuildConfig,
     elapsed: int,
     success: bool,
-):
+) -> None:
     subprocess.check_call(
         f"echo 'BUILD_URLS=build_urls_{build_name}' >> $GITHUB_ENV", shell=True
     )

@@ -218,7 +218,7 @@ def upload_master_static_binaries(
     build_config: BuildConfig,
     s3_helper: S3Helper,
     build_output_path: str,
-):
+) -> None:
     """Upload binary artifacts to a static S3 links"""
     static_binary_name = build_config.get("static_binary_name", False)
     if pr_info.number != 0:
@@ -5,7 +5,7 @@ import logging
 import os
 import sys
 import time
-from typing import List, Optional
+from typing import Any, List, Optional

 import requests  # type: ignore

@@ -18,7 +18,7 @@ def get_with_retries(
     url: str,
     retries: int = DOWNLOAD_RETRIES_COUNT,
     sleep: int = 3,
-    **kwargs,
+    **kwargs: Any,
 ) -> requests.Response:
     logging.info(
         "Getting URL with %i tries and sleep %i in between: %s", retries, sleep, url

@@ -41,18 +41,18 @@ def get_with_retries(
     return response


-def get_build_name_for_check(check_name) -> str:
-    return CI_CONFIG["tests_config"][check_name]["required_build"]
+def get_build_name_for_check(check_name: str) -> str:
+    return CI_CONFIG["tests_config"][check_name]["required_build"]  # type: ignore


-def read_build_urls(build_name, reports_path) -> List[str]:
+def read_build_urls(build_name: str, reports_path: str) -> List[str]:
     for root, _, files in os.walk(reports_path):
         for f in files:
             if build_name in f:
                 logging.info("Found build report json %s", f)
                 with open(os.path.join(root, f), "r", encoding="utf-8") as file_handler:
                     build_report = json.load(file_handler)
-                    return build_report["build_urls"]
+                    return build_report["build_urls"]  # type: ignore
     return []

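The targeted ignores here follow from warn_return_any = True: CI_CONFIG and the parsed JSON report are plain dicts, indexing them yields Any, and mypy flags returning Any from a function declared to return str or List[str]. A minimal sketch of the pattern (the sample keys and values are invented):

    def required_build(check_name: str, config: dict) -> str:
        # Indexing an untyped dict returns Any; the ignore silences
        # warn_return_any for this declared "-> str" function.
        return config["tests_config"][check_name]["required_build"]  # type: ignore

    cfg = {"tests_config": {"Stateless tests": {"required_build": "package_release"}}}
    print(required_build("Stateless tests", cfg))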
@@ -19,7 +19,7 @@ from env_helper import (
 from report import create_build_html_report
 from s3_helper import S3Helper
 from get_robot_token import get_best_robot_token
-from pr_info import PRInfo
+from pr_info import NeedsDataType, PRInfo
 from commit_status_helper import (
     get_commit,
     update_mergeable_check,

@@ -28,7 +28,7 @@ from ci_config import CI_CONFIG
 from rerun_helper import RerunHelper


-NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH")
+NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")


 class BuildResult:

@@ -98,7 +98,7 @@ def get_failed_report(


 def process_report(
-    build_report,
+    build_report: dict,
 ) -> Tuple[List[BuildResult], List[List[str]], List[str]]:
     build_config = build_report["build_config"]
     build_result = BuildResult(

@@ -144,16 +144,14 @@ def main():
         os.makedirs(temp_path)

     build_check_name = sys.argv[1]
-    needs_data = None
+    needs_data = {}  # type: NeedsDataType
     required_builds = 0
     if os.path.exists(NEEDS_DATA_PATH):
         with open(NEEDS_DATA_PATH, "rb") as file_handler:
             needs_data = json.load(file_handler)
             required_builds = len(needs_data)

-    if needs_data is not None and all(
-        i["result"] == "skipped" for i in needs_data.values()
-    ):
+    if needs_data and all(i["result"] == "skipped" for i in needs_data.values()):
         logging.info("All builds are skipped, exiting")
         sys.exit(0)

@@ -218,19 +216,21 @@ def main():
     build_logs = []

     for build_report in build_reports:
-        build_result, build_artifacts_url, build_logs_url = process_report(build_report)
-        logging.info(
-            "Got %s artifact groups for build report report", len(build_result)
-        )
-        build_results.extend(build_result)
+        _build_results, build_artifacts_url, build_logs_url = process_report(
+            build_report
+        )
+        logging.info(
+            "Got %s artifact groups for build report report", len(_build_results)
+        )
+        build_results.extend(_build_results)
         build_artifacts.extend(build_artifacts_url)
         build_logs.extend(build_logs_url)

     for failed_job in missing_build_names:
-        build_result, build_artifacts_url, build_logs_url = get_failed_report(
+        _build_results, build_artifacts_url, build_logs_url = get_failed_report(
             failed_job
         )
-        build_results.extend(build_result)
+        build_results.extend(_build_results)
         build_artifacts.extend(build_artifacts_url)
         build_logs.extend(build_logs_url)

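needs_data switches from None to an empty dict so the later guard can rely on truthiness alone. The NeedsDataType alias lives in tests/ci/pr_info.py and is not shown in this diff; assuming it is a mapping from job name to a dict with a "result" field (which is how main() consumes it here), the guard behaves like this sketch (job names invented for the example):

    from typing import Dict

    NeedsDataType = Dict[str, Dict[str, str]]  # assumed shape, see note above

    needs_data = {
        "BuilderDebRelease": {"result": "skipped"},
        "BuilderDebAsan": {"result": "skipped"},
    }  # type: NeedsDataType

    # An empty dict is falsy, so the old "is not None" check is redundant.
    if needs_data and all(i["result"] == "skipped" for i in needs_data.values()):
        print("All builds are skipped, exiting")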
@@ -106,7 +106,7 @@ def _exec_get_with_retry(url: str, token: str) -> dict:
     try:
         response = requests.get(url, headers=headers)
         response.raise_for_status()
-        return response.json()
+        return response.json()  # type: ignore
     except Exception as ex:
         print("Got exception executing request", ex)
         time.sleep(i + 1)

@@ -130,8 +130,7 @@ WorkflowDescription = namedtuple(


 def get_workflows_description_for_pull_request(
-    pull_request_event,
-    token,
+    pull_request_event: dict, token: str
 ) -> List[WorkflowDescription]:
     head_repo = pull_request_event["head"]["repo"]["full_name"]
     head_branch = pull_request_event["head"]["ref"]

@@ -193,7 +192,7 @@ def get_workflows_description_for_pull_request(


 def get_workflow_description_fallback(
-    pull_request_event, token
+    pull_request_event: dict, token: str
 ) -> List[WorkflowDescription]:
     head_repo = pull_request_event["head"]["repo"]["full_name"]
     head_branch = pull_request_event["head"]["ref"]

@@ -241,7 +240,7 @@ def get_workflow_description_fallback(
             WorkflowDescription(
                 url=wf["url"],
                 run_id=wf["id"],
-                name=workflow["name"],
+                name=wf["name"],
                 head_sha=wf["head_sha"],
                 status=wf["status"],
                 rerun_url=wf["rerun_url"],

@@ -254,7 +253,7 @@ def get_workflow_description_fallback(
     return workflow_descriptions


-def get_workflow_description(workflow_url, token) -> WorkflowDescription:
+def get_workflow_description(workflow_url: str, token: str) -> WorkflowDescription:
     workflow = _exec_get_with_retry(workflow_url, token)
     return WorkflowDescription(
         url=workflow["url"],

@@ -331,7 +330,7 @@ def main(event):
         workflow_descriptions
         or get_workflow_description_fallback(pull_request, token)
     )
-    workflow_descriptions.sort(key=lambda x: x.run_id)
+    workflow_descriptions.sort(key=lambda x: x.run_id)  # type: ignore
     most_recent_workflow = workflow_descriptions[-1]
     if (
         most_recent_workflow.status == "completed"

@@ -376,7 +375,7 @@ def main(event):
         print("Not found any workflows")
         return

-    workflow_descriptions.sort(key=lambda x: x.run_id)
+    workflow_descriptions.sort(key=lambda x: x.run_id)  # type: ignore
     most_recent_workflow = workflow_descriptions[-1]
     print("Latest workflow", most_recent_workflow)
     if (
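The sort calls get blanket ignores because WorkflowDescription is built with collections.namedtuple, which leaves mypy little to check. An alternative sketch, not what this PR does: declaring it with typing.NamedTuple (fields mirror only the ones visible in the diff) gives the attributes real types, so sort keys type-check without ignores:

    from typing import List, NamedTuple

    class WorkflowDescription(NamedTuple):
        url: str
        run_id: int
        name: str
        head_sha: str
        status: str
        rerun_url: str

    def latest(workflows: List[WorkflowDescription]) -> WorkflowDescription:
        # run_id is a typed int here, so no ignore is needed on the key.
        return max(workflows, key=lambda w: w.run_id)

    wfs = [
        WorkflowDescription("u1", 2, "ci", "sha1", "completed", "r1"),
        WorkflowDescription("u2", 5, "ci", "sha2", "completed", "r2"),
    ]
    print(latest(wfs).run_id)  # 5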
@@ -92,7 +92,8 @@ Merge it only if you intend to backport changes to the target branch, otherwise
         if branch_updated:
             self._backported = True

-    def pop_prs(self, prs: PullRequests):
+    def pop_prs(self, prs: PullRequests) -> None:
+        """the method processes all prs and pops the ReleaseBranch related prs"""
         to_pop = []  # type: List[int]
         for i, pr in enumerate(prs):
             if self.name not in pr.head.ref:

@@ -105,14 +106,14 @@ Merge it only if you intend to backport changes to the target branch, otherwise
                 to_pop.append(i)
             else:
                 logging.error(
-                    "PR #%s doesn't head ref starting with known suffix",
+                    "head ref of PR #%s isn't starting with known suffix",
                     pr.number,
                 )
         for i in reversed(to_pop):
             # Going from the tail to keep the order and pop greater index first
             prs.pop(i)

-    def process(self, dry_run: bool):
+    def process(self, dry_run: bool) -> None:
         if self.backported:
             return
         if not self.cherrypick_pr:

@@ -209,6 +210,7 @@ Merge it only if you intend to backport changes to the target branch, otherwise
         self._assign_new_pr(self.cherrypick_pr)

     def create_backport(self):
+        assert self.cherrypick_pr is not None
         # Checkout the backport branch from the remote and make all changes to
         # apply like they are only one cherry-pick commit on top of release
         git_runner(f"{self.git_prefix} checkout -f {self.backport_branch}")

@@ -239,7 +241,7 @@ Merge it only if you intend to backport changes to the target branch, otherwise
         self.backport_pr.add_to_labels(Labels.BACKPORT)
         self._assign_new_pr(self.backport_pr)

-    def _assign_new_pr(self, new_pr: PullRequest):
+    def _assign_new_pr(self, new_pr: PullRequest) -> None:
         """Assign `new_pr` to author, merger and assignees of an original PR"""
         # It looks there some race when multiple .add_to_assignees are executed,
         # so we'll add all at once

@@ -340,7 +342,7 @@ class Backport:
             )
             self.error = e

-    def process_pr(self, pr: PullRequest):
+    def process_pr(self, pr: PullRequest) -> None:
         pr_labels = [label.name for label in pr.labels]
         if Labels.MUST_BACKPORT in pr_labels:
             branches = [

@@ -403,7 +405,7 @@ class Backport:
             # And check it after the running
             self.mark_pr_backported(pr)

-    def mark_pr_backported(self, pr: PullRequest):
+    def mark_pr_backported(self, pr: PullRequest) -> None:
         if self.dry_run:
             logging.info("DRY RUN: would mark PR #%s as done", pr.number)
             return

@@ -488,7 +490,8 @@ def main():

     gh = GitHub(token, per_page=100)
     bp = Backport(gh, args.repo, args.dry_run)
-    bp.gh.cache_path = str(f"{TEMP_PATH}/gh_cache")
+    # https://github.com/python/mypy/issues/3004
+    bp.gh.cache_path = f"{TEMP_PATH}/gh_cache"  # type: ignore
     bp.receive_release_prs()
     bp.receive_prs_for_backport()
     bp.process_backports()
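The added `assert self.cherrypick_pr is not None` is a mypy narrowing idiom: cherrypick_pr is Optional, and the assert proves it non-None for the rest of create_backport. A stripped-down illustration, with str standing in for the real PullRequest and invented values:

    from typing import Optional

    class ReleaseBranch:
        def __init__(self) -> None:
            self.cherrypick_pr = None  # type: Optional[str]

        def create_backport(self) -> None:
            # Without this assert, mypy flags attribute access on an Optional.
            assert self.cherrypick_pr is not None
            print(self.cherrypick_pr.upper())

    rb = ReleaseBranch()
    rb.cherrypick_pr = "cherrypick/23.1/abc123"
    rb.create_backport()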
@@ -12,11 +12,12 @@ import json
 import time
 from collections import namedtuple
 from datetime import datetime
+from typing import Dict, List, Tuple

 import jwt
-import requests
-import boto3
-from botocore.exceptions import ClientError
+import requests  # type: ignore
+import boto3  # type: ignore
+from botocore.exceptions import ClientError  # type: ignore

 UNIVERSAL_LABEL = "universal"
 RUNNER_TYPE_LABELS = [

@@ -29,8 +30,13 @@ RUNNER_TYPE_LABELS = [
     "style-checker-aarch64",
 ]

+RunnerDescription = namedtuple(
+    "RunnerDescription", ["id", "name", "tags", "offline", "busy"]
+)
+RunnerDescriptions = List[RunnerDescription]
+

-def get_dead_runners_in_ec2(runners):
+def get_dead_runners_in_ec2(runners: RunnerDescriptions) -> RunnerDescriptions:
     ids = {
         runner.name: runner
         for runner in runners

@@ -92,7 +98,7 @@ def get_dead_runners_in_ec2(runners):
     return result_to_delete


-def get_lost_ec2_instances(runners):
+def get_lost_ec2_instances(runners: RunnerDescriptions) -> List[dict]:
     client = boto3.client("ec2")
     reservations = client.describe_instances(
         Filters=[{"Name": "tag-key", "Values": ["github:runner-type"]}]

@@ -130,7 +136,7 @@ def get_lost_ec2_instances(runners):
     return lost_instances


-def get_key_and_app_from_aws():
+def get_key_and_app_from_aws() -> Tuple[str, int]:
     secret_name = "clickhouse_github_secret_key"
     session = boto3.session.Session()
     client = session.client(

@@ -146,7 +152,7 @@ def handler(event, context):
     main(private_key, app_id, True, True)


-def get_installation_id(jwt_token):
+def get_installation_id(jwt_token: str) -> int:
     headers = {
         "Authorization": f"Bearer {jwt_token}",
         "Accept": "application/vnd.github.v3+json",

@@ -157,10 +163,12 @@ def get_installation_id(jwt_token):
     for installation in data:
         if installation["account"]["login"] == "ClickHouse":
             installation_id = installation["id"]
-            return installation_id
+            break

+    return installation_id  # type: ignore


-def get_access_token(jwt_token, installation_id):
+def get_access_token(jwt_token: str, installation_id: int) -> str:
     headers = {
         "Authorization": f"Bearer {jwt_token}",
         "Accept": "application/vnd.github.v3+json",

@@ -171,15 +179,10 @@ def get_access_token(jwt_token, installation_id):
     )
     response.raise_for_status()
     data = response.json()
-    return data["token"]
+    return data["token"]  # type: ignore


-RunnerDescription = namedtuple(
-    "RunnerDescription", ["id", "name", "tags", "offline", "busy"]
-)
-
-
-def list_runners(access_token):
+def list_runners(access_token: str) -> RunnerDescriptions:
     headers = {
         "Authorization": f"token {access_token}",
         "Accept": "application/vnd.github.v3+json",

@@ -225,8 +228,10 @@ def list_runners(access_token):
     return result


-def group_runners_by_tag(listed_runners):
-    result = {}
+def group_runners_by_tag(
+    listed_runners: RunnerDescriptions,
+) -> Dict[str, RunnerDescriptions]:
+    result = {}  # type: Dict[str, RunnerDescriptions]

     def add_to_result(tag, runner):
         if tag not in result:

@@ -248,7 +253,9 @@ def group_runners_by_tag(listed_runners):
     return result


-def push_metrics_to_cloudwatch(listed_runners, namespace):
+def push_metrics_to_cloudwatch(
+    listed_runners: RunnerDescriptions, namespace: str
+) -> None:
     client = boto3.client("cloudwatch")
     metrics_data = []
     busy_runners = sum(

@@ -278,7 +285,7 @@ def push_metrics_to_cloudwatch(listed_runners, namespace):
         }
     )
     if total_active_runners == 0:
-        busy_ratio = 100
+        busy_ratio = 100.0
     else:
         busy_ratio = busy_runners / total_active_runners * 100

@@ -293,7 +300,7 @@ def push_metrics_to_cloudwatch(listed_runners, namespace):
     client.put_metric_data(Namespace=namespace, MetricData=metrics_data)


-def delete_runner(access_token, runner):
+def delete_runner(access_token: str, runner: RunnerDescription) -> bool:
     headers = {
         "Authorization": f"token {access_token}",
         "Accept": "application/vnd.github.v3+json",

@@ -305,10 +312,15 @@ def delete_runner(access_token, runner):
     )
     response.raise_for_status()
     print(f"Response code deleting {runner.name} is {response.status_code}")
-    return response.status_code == 204
+    return bool(response.status_code == 204)


-def main(github_secret_key, github_app_id, push_to_cloudwatch, delete_offline_runners):
+def main(
+    github_secret_key: str,
+    github_app_id: int,
+    push_to_cloudwatch: bool,
+    delete_offline_runners: bool,
+) -> None:
     payload = {
         "iat": int(time.time()) - 60,
         "exp": int(time.time()) + (10 * 60),
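`return bool(response.status_code == 204)` looks redundant, but requests is imported with `# type: ignore`, so the response is effectively Any to mypy, the comparison is Any, and warn_return_any would flag it against the declared `-> bool`. A small demonstration of the pattern (FakeResponse is an invented stand-in for an untyped response object):

    from typing import Any

    def delete_ok(response: Any) -> bool:
        # "Any == 204" is Any to mypy; bool(...) keeps "-> bool" honest
        # under warn_return_any.
        return bool(response.status_code == 204)

    class FakeResponse:
        status_code = 204

    print(delete_ok(FakeResponse()))  # True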
@@ -7,14 +7,21 @@ import logging

 from github import Github

-from env_helper import IMAGES_PATH, REPO_COPY, S3_TEST_REPORTS_BUCKET, S3_DOWNLOAD
-from stopwatch import Stopwatch
-from upload_result_helper import upload_results
-from s3_helper import S3Helper
-from get_robot_token import get_best_robot_token
+from env_helper import (
+    IMAGES_PATH,
+    REPO_COPY,
+    S3_DOWNLOAD,
+    S3_TEST_REPORTS_BUCKET,
+    TEMP_PATH,
+)
 from commit_status_helper import post_commit_status
 from docker_pull_helper import get_image_with_version
+from get_robot_token import get_best_robot_token
+from pr_info import PRInfo
+from s3_helper import S3Helper
+from stopwatch import Stopwatch
 from tee_popen import TeePopen
+from upload_result_helper import upload_results

 NAME = "Woboq Build"

@@ -33,17 +40,16 @@ if __name__ == "__main__":

     stopwatch = Stopwatch()

-    temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))
-
     gh = Github(get_best_robot_token(), per_page=100)
+    pr_info = PRInfo()

-    if not os.path.exists(temp_path):
-        os.makedirs(temp_path)
+    if not os.path.exists(TEMP_PATH):
+        os.makedirs(TEMP_PATH)

     docker_image = get_image_with_version(IMAGES_PATH, "clickhouse/codebrowser")
     s3_helper = S3Helper()

-    result_path = os.path.join(temp_path, "result_path")
+    result_path = os.path.join(TEMP_PATH, "result_path")
     if not os.path.exists(result_path):
         os.makedirs(result_path)

@@ -51,7 +57,7 @@ if __name__ == "__main__":

     logging.info("Going to run codebrowser: %s", run_command)

-    run_log_path = os.path.join(temp_path, "runlog.log")
+    run_log_path = os.path.join(TEMP_PATH, "runlog.log")

     with TeePopen(run_command, run_log_path) as process:
         retcode = process.wait()

@@ -60,7 +66,7 @@ if __name__ == "__main__":
     else:
         logging.info("Run failed")

-    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
+    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {TEMP_PATH}", shell=True)

     report_path = os.path.join(result_path, "html_report")
     logging.info("Report path %s", report_path)

@@ -76,12 +82,8 @@ if __name__ == "__main__":

     test_results = [(index_html, "Look at the report")]

-    report_url = upload_results(
-        s3_helper, 0, os.getenv("GITHUB_SHA"), test_results, [], NAME
-    )
+    report_url = upload_results(s3_helper, 0, pr_info.sha, test_results, [], NAME)

     print(f"::notice ::Report url: {report_url}")

-    post_commit_status(
-        gh, os.getenv("GITHUB_SHA"), NAME, "Report built", "success", report_url
-    )
+    post_commit_status(gh, pr_info.sha, NAME, "Report built", "success", report_url)
@@ -17,7 +17,7 @@ RETRY = 5
 CommitStatuses = List[CommitStatus]


-def override_status(status: str, check_name: str, invert=False) -> str:
+def override_status(status: str, check_name: str, invert: bool = False) -> str:
     if CI_CONFIG["tests_config"].get(check_name, {}).get("force_tests", False):
         return "success"

@@ -45,7 +45,7 @@ def get_commit(gh: Github, commit_sha: str, retry_count: int = RETRY) -> Commit:

 def post_commit_status(
     gh: Github, sha: str, check_name: str, description: str, state: str, report_url: str
-):
+) -> None:
     for i in range(RETRY):
         try:
             commit = get_commit(gh, sha, 1)

@@ -64,7 +64,7 @@ def post_commit_status(

 def post_commit_status_to_file(
     file_path: str, description: str, state: str, report_url: str
-):
+) -> None:
     if os.path.exists(file_path):
         raise Exception(f'File "{file_path}" already exists!')
     with open(file_path, "w", encoding="utf-8") as f:

@@ -88,21 +88,21 @@ def get_commit_filtered_statuses(commit: Commit) -> CommitStatuses:
     return list(filtered.values())


-def remove_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]):
+def remove_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
     repo = gh.get_repo(GITHUB_REPOSITORY)
     pull_request = repo.get_pull(pr_info.number)
     for label in labels_names:
         pull_request.remove_from_labels(label)


-def post_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]):
+def post_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
     repo = gh.get_repo(GITHUB_REPOSITORY)
     pull_request = repo.get_pull(pr_info.number)
     for label in labels_names:
         pull_request.add_to_labels(label)


-def fail_mergeable_check(commit: Commit, description: str):
+def fail_mergeable_check(commit: Commit, description: str) -> None:
     commit.create_status(
         context="Mergeable Check",
         description=description,

@@ -111,7 +111,7 @@ def fail_mergeable_check(commit: Commit, description: str):
     )


-def reset_mergeable_check(commit: Commit, description: str = ""):
+def reset_mergeable_check(commit: Commit, description: str = "") -> None:
     commit.create_status(
         context="Mergeable Check",
         description=description,

@@ -120,7 +120,7 @@ def reset_mergeable_check(commit: Commit, description: str = ""):
     )


-def update_mergeable_check(gh: Github, pr_info: PRInfo, check_name: str):
+def update_mergeable_check(gh: Github, pr_info: PRInfo, check_name: str) -> None:
     if SKIP_MERGEABLE_CHECK_LABEL in pr_info.labels:
         return

@@ -8,7 +8,7 @@ import shutil
 import subprocess
 import time
 import sys
-from typing import Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, List, Optional, Set, Tuple, Union

 from github import Github

@@ -52,7 +52,7 @@ class DockerImage:
             and self.only_amd64 == other.only_amd64
         )

-    def __lt__(self, other) -> bool:
+    def __lt__(self, other: Any) -> bool:
         if not isinstance(other, DockerImage):
             return False
         if self.parent and not other.parent:

@@ -270,7 +270,7 @@ def build_and_push_one_image(
 def process_single_image(
     image: DockerImage,
     versions: List[str],
-    additional_cache,
+    additional_cache: str,
     push: bool,
     child: bool,
 ) -> List[Tuple[str, str, str]]:
@@ -70,7 +70,7 @@ def parse_args() -> argparse.Namespace:

 def load_images(path: str, suffix: str) -> Images:
     with open(os.path.join(path, CHANGED_IMAGES.format(suffix)), "rb") as images:
-        return json.load(images)
+        return json.load(images)  # type: ignore


 def strip_suffix(suffix: str, images: Images) -> Images:
@@ -6,11 +6,11 @@ import time
 import subprocess
 import logging

-from typing import Optional
+from typing import List, Optional


 class DockerImage:
-    def __init__(self, name, version: Optional[str] = None):
+    def __init__(self, name: str, version: Optional[str] = None):
         self.name = name
         if version is None:
             self.version = "latest"

@@ -22,8 +22,11 @@ class DockerImage:


 def get_images_with_versions(
-    reports_path, required_image, pull=True, version: Optional[str] = None
-):
+    reports_path: str,
+    required_images: List[str],
+    pull: bool = True,
+    version: Optional[str] = None,
+) -> List[DockerImage]:
     images_path = None
     for root, _, files in os.walk(reports_path):
         for f in files:

@@ -45,12 +48,13 @@ def get_images_with_versions(
     images = {}

     docker_images = []
-    for image_name in required_image:
+    for image_name in required_images:
         docker_image = DockerImage(image_name, version)
         if image_name in images:
             docker_image.version = images[image_name]
         docker_images.append(docker_image)

+    latest_error = Exception("predefined to avoid access before created")
     if pull:
         for docker_image in docker_images:
             for i in range(10):

@@ -75,6 +79,8 @@ def get_images_with_versions(
     return docker_images


-def get_image_with_version(reports_path, image, pull=True, version=None):
+def get_image_with_version(
+    reports_path: str, image: str, pull: bool = True, version: Optional[str] = None
+) -> DockerImage:
     logging.info("Looking for images file in %s", reports_path)
     return get_images_with_versions(reports_path, [image], pull, version=version)[0]
@@ -43,55 +43,55 @@ class TestDockerImageCheck(unittest.TestCase):
                     "docker/test/stateless",
                     "clickhouse/stateless-test",
                     False,
-                    "clickhouse/test-base",
+                    "clickhouse/test-base",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/test/integration/base",
                     "clickhouse/integration-test",
                     False,
-                    "clickhouse/test-base",
+                    "clickhouse/test-base",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/test/fuzzer",
                     "clickhouse/fuzzer",
                     False,
-                    "clickhouse/test-base",
+                    "clickhouse/test-base",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/test/keeper-jepsen",
                     "clickhouse/keeper-jepsen-test",
                     False,
-                    "clickhouse/test-base",
+                    "clickhouse/test-base",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/docs/check",
                     "clickhouse/docs-check",
                     False,
-                    "clickhouse/docs-builder",
+                    "clickhouse/docs-builder",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/docs/release",
                     "clickhouse/docs-release",
                     False,
-                    "clickhouse/docs-builder",
+                    "clickhouse/docs-builder",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/test/stateful",
                     "clickhouse/stateful-test",
                     False,
-                    "clickhouse/stateless-test",
+                    "clickhouse/stateless-test",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/test/unit",
                     "clickhouse/unit-test",
                     False,
-                    "clickhouse/stateless-test",
+                    "clickhouse/stateless-test",  # type: ignore
                 ),
                 di.DockerImage(
                     "docker/test/stress",
                     "clickhouse/stress-test",
                     False,
-                    "clickhouse/stateful-test",
+                    "clickhouse/stateful-test",  # type: ignore
                 ),
             ]
         )

@@ -277,7 +277,7 @@ class TestDockerServer(unittest.TestCase):
         ds.gen_tags(version, "auto")

     @patch("docker_server.get_tagged_versions")
-    def test_auto_release_type(self, mock_tagged_versions: MagicMock):
+    def test_auto_release_type(self, mock_tagged_versions: MagicMock) -> None:
         mock_tagged_versions.return_value = [
             get_version_from_string("1.1.1.1"),
             get_version_from_string("1.2.1.1"),
@@ -6,6 +6,7 @@ import os
 import csv
 import sys
 import atexit
+from typing import List, Tuple

 from github import Github

@@ -50,8 +51,10 @@ def get_fasttest_cmd(
     )


-def process_results(result_folder):
-    test_results = []
+def process_results(
+    result_folder: str,
+) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
+    test_results = []  # type: List[Tuple[str, str]]
     additional_files = []
     # Just upload all files from result_folder.
     # If task provides processed results, then it's responsible for content of

@@ -78,7 +81,7 @@ def process_results(result_folder):
     results_path = os.path.join(result_folder, "test_results.tsv")
     if os.path.exists(results_path):
         with open(results_path, "r", encoding="utf-8") as results_file:
-            test_results = list(csv.reader(results_file, delimiter="\t"))
+            test_results = list(csv.reader(results_file, delimiter="\t"))  # type: ignore
         if len(test_results) == 0:
             return "error", "Empty test_results.tsv", test_results, additional_files

@@ -172,7 +175,7 @@ if __name__ == "__main__":
         "test_log.txt" in test_output_files or "test_result.txt" in test_output_files
     )
     test_result_exists = "test_results.tsv" in test_output_files
-    test_results = []
+    test_results = []  # type: List[Tuple[str, str]]
     if "submodule_log.txt" not in test_output_files:
         description = "Cannot clone repository"
         state = "failure"
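Empty containers are the other recurring annotation target: `[]` gives mypy no element type, so the PR pins it at the assignment. The comment form used throughout this diff and the inline variable annotation are equivalent to mypy (the sample entry is invented):

    from typing import List, Tuple

    test_results = []  # type: List[Tuple[str, str]]
    results_2: List[Tuple[str, str]] = []  # the inline spelling

    test_results.append(("00001_select", "OK"))
    print(test_results)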
@@ -5,27 +5,11 @@ from github import Github
 from env_helper import GITHUB_RUN_URL
 from pr_info import PRInfo
 from get_robot_token import get_best_robot_token
-from commit_status_helper import get_commit
+from commit_status_helper import get_commit, get_commit_filtered_statuses

 NAME = "Run Check"


-def filter_statuses(statuses):
-    """
-    Squash statuses to latest state
-    1. context="first", state="success", update_time=1
-    2. context="second", state="success", update_time=2
-    3. context="first", stat="failure", update_time=3
-    =========>
-    1. context="second", state="success"
-    2. context="first", stat="failure"
-    """
-    filt = {}
-    for status in sorted(statuses, key=lambda x: x.updated_at):
-        filt[status.context] = status
-    return filt
-
-
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)

@@ -34,8 +18,13 @@ if __name__ == "__main__":
     commit = get_commit(gh, pr_info.sha)

     url = GITHUB_RUN_URL
-    statuses = filter_statuses(list(commit.get_statuses()))
-    if NAME in statuses and statuses[NAME].state == "pending":
+    statuses = get_commit_filtered_statuses(commit)
+    pending_status = any(  # find NAME status in pending state
+        True
+        for status in statuses
+        if status.context == NAME and status.state == "pending"
+    )
+    if pending_status:
         commit.create_status(
             context=NAME,
             description="All checks finished",
@@ -7,6 +7,7 @@ import os
 import subprocess
 import sys
 import atexit
+from typing import List, Tuple

 from github import Github

@@ -122,8 +123,11 @@ def get_tests_to_run(pr_info):
     return list(result)


-def process_results(result_folder, server_log_path):
-    test_results = []
+def process_results(
+    result_folder: str,
+    server_log_path: str,
+) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
+    test_results = []  # type: List[Tuple[str, str]]
     additional_files = []
     # Just upload all files from result_folder.
     # If task provides processed results, then it's responsible for content of result_folder.

@@ -166,7 +170,7 @@ def process_results(result_folder, server_log_path):
         return "error", "Not found test_results.tsv", test_results, additional_files

     with open(results_path, "r", encoding="utf-8") as results_file:
-        test_results = list(csv.reader(results_file, delimiter="\t"))
+        test_results = list(csv.reader(results_file, delimiter="\t"))  # type: ignore
     if len(test_results) == 0:
         return "error", "Empty test_results.tsv", test_results, additional_files

@@ -232,8 +236,8 @@ if __name__ == "__main__":
         sys.exit(0)

     if "RUN_BY_HASH_NUM" in os.environ:
-        run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM"))
-        run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL"))
+        run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM", "0"))
+        run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL", "0"))
         check_name_with_group = (
             check_name + f" [{run_by_hash_num + 1}/{run_by_hash_total}]"
         )
@@ -3,7 +3,7 @@

 import re
 import logging

-import requests
+import requests  # type: ignore

 CLICKHOUSE_TAGS_URL = "https://api.github.com/repos/ClickHouse/ClickHouse/tags"
 VERSION_PATTERN = r"(v(?:\d+\.)?(?:\d+\.)?(?:\d+\.)?\d+-[a-zA-Z]*)"
@@ -1,8 +1,17 @@
 #!/usr/bin/env python3
 import logging
+from dataclasses import dataclass

 import boto3  # type: ignore
-from github import Github  # type: ignore
+from github import Github
+from github.AuthenticatedUser import AuthenticatedUser


+@dataclass
+class Token:
+    user: AuthenticatedUser
+    value: str
+    rest: int
+
+
 def get_parameter_from_ssm(name, decrypt=True, client=None):

@@ -19,7 +28,7 @@ def get_best_robot_token(token_prefix_env_name="github_robot_token_"):
         ]
     )["Parameters"]
     assert parameters
-    token = {"login": "", "value": "", "rest": 0}
+    token = None

     for token_name in [p["Name"] for p in parameters]:
         value = get_parameter_from_ssm(token_name, True, client)

@@ -29,12 +38,15 @@ def get_best_robot_token(token_prefix_env_name="github_robot_token_"):
         user = gh.get_user()
         rest, _ = gh.rate_limiting
         logging.info("Get token with %s remaining requests", rest)
-        if token["rest"] < rest:
-            token = {"user": user, "value": value, "rest": rest}
+        if token is None:
+            token = Token(user, value, rest)
+            continue
+        if token.rest < rest:
+            token.user, token.value, token.rest = user, value, rest

-    assert token["value"]
+    assert token
     logging.info(
-        "User %s with %s remaining requests is used", token["user"].login, token["rest"]
+        "User %s with %s remaining requests is used", token.user.login, token.rest
     )

-    return token["value"]
+    return token.value
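Replacing the placeholder dict with a Token dataclass gives the selection loop typed fields and lets mypy verify the token.rest comparison. The core logic, isolated (user names and token values are invented; the real user field is a PyGithub AuthenticatedUser):

    from dataclasses import dataclass
    from typing import List, Optional, Tuple

    @dataclass
    class Token:
        user: str  # simplified stand-in for AuthenticatedUser
        value: str
        rest: int

    def pick_best(candidates: List[Tuple[str, str, int]]) -> Token:
        token = None  # type: Optional[Token]
        for user, value, rest in candidates:
            if token is None:
                token = Token(user, value, rest)
                continue
            if token.rest < rest:
                token.user, token.value, token.rest = user, value, rest
        assert token
        return token

    # keeps the token with the most remaining API requests
    print(pick_best([("robot-1", "t1", 120), ("robot-2", "t2", 4900)]).user)
    # robot-2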
@ -4,7 +4,7 @@ import logging
|
|||||||
import os.path as p
|
import os.path as p
|
||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
from typing import List, Optional
|
+from typing import Any, List, Optional

 logger = logging.getLogger(__name__)

@@ -21,19 +21,19 @@ TWEAK = 1


 # Py 3.8 removeprefix and removesuffix
-def removeprefix(string: str, prefix: str):
+def removeprefix(string: str, prefix: str) -> str:
     if string.startswith(prefix):
         return string[len(prefix) :]  # noqa: ignore E203, false positive
     return string


-def removesuffix(string: str, suffix: str):
+def removesuffix(string: str, suffix: str) -> str:
     if string.endswith(suffix):
         return string[: -len(suffix)]
     return string


-def commit(name: str):
+def commit(name: str) -> str:
     r = re.compile(SHA_REGEXP)
     if not r.match(name):
         raise argparse.ArgumentTypeError(
@@ -42,7 +42,7 @@ def commit(name: str)
     return name


-def release_branch(name: str):
+def release_branch(name: str) -> str:
     r = re.compile(RELEASE_BRANCH_REGEXP)
     if not r.match(name):
         raise argparse.ArgumentTypeError("release branch should be as 12.1")
@@ -55,20 +55,23 @@ class Runner:
     def __init__(self, cwd: str = CWD):
         self._cwd = cwd

-    def run(self, cmd: str, cwd: Optional[str] = None, **kwargs) -> str:
+    def run(self, cmd: str, cwd: Optional[str] = None, **kwargs: Any) -> str:
         if cwd is None:
             cwd = self.cwd
         logger.debug("Running command: %s", cmd)
-        return subprocess.check_output(
-            cmd, shell=True, cwd=cwd, encoding="utf-8", **kwargs
-        ).strip()
+        output = str(
+            subprocess.check_output(
+                cmd, shell=True, cwd=cwd, encoding="utf-8", **kwargs
+            ).strip()
+        )
+        return output

     @property
     def cwd(self) -> str:
         return self._cwd

     @cwd.setter
-    def cwd(self, value: str):
+    def cwd(self, value: str) -> None:
         # Set _cwd only once, then set it to readonly
         if self._cwd != CWD:
             return
@@ -139,7 +142,7 @@ class Git:
         )

     @staticmethod
-    def check_tag(value: str):
+    def check_tag(value: str) -> None:
         if value == "":
             return
         if not Git._tag_pattern.match(value):
@@ -150,7 +153,7 @@ class Git:
         return self._latest_tag

     @latest_tag.setter
-    def latest_tag(self, value: str):
+    def latest_tag(self, value: str) -> None:
         self.check_tag(value)
         self._latest_tag = value

@@ -159,7 +162,7 @@ class Git:
         return self._new_tag

     @new_tag.setter
-    def new_tag(self, value: str):
+    def new_tag(self, value: str) -> None:
         self.check_tag(value)
         self._new_tag = value

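[Editor's note, not part of the commit] The Runner.run change above wraps check_output in str() because forwarding **kwargs into the call keeps mypy from selecting a precise overload, so the result is inferred as Any and strict mode's warn_return_any would reject returning it from a function declared "-> str". A minimal sketch with illustrative names:

    import subprocess
    from typing import Any


    def run(cmd: str, **kwargs: Any) -> str:
        # With **kwargs forwarded, mypy cannot pick an exact overload of
        # check_output and infers Any; str() pins the declared return type.
        output = subprocess.check_output(cmd, shell=True, encoding="utf-8", **kwargs)
        return str(output.strip())


    print(run("echo hello"))  # prints "hello"
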
@@ -8,11 +8,18 @@ from time import sleep
 from typing import List, Optional, Tuple

 import github
-from github.GithubException import RateLimitExceededException
-from github.Issue import Issue
-from github.NamedUser import NamedUser
-from github.PullRequest import PullRequest
-from github.Repository import Repository
+
+# explicit reimport
+# pylint: disable=useless-import-alias
+from github.GithubException import (
+    RateLimitExceededException as RateLimitExceededException,
+)
+from github.Issue import Issue as Issue
+from github.NamedUser import NamedUser as NamedUser
+from github.PullRequest import PullRequest as PullRequest
+from github.Repository import Repository as Repository
+
+# pylint: enable=useless-import-alias

 CACHE_PATH = p.join(p.dirname(p.realpath(__file__)), "gh_cache")

@@ -90,7 +97,7 @@ class GitHub(github.Github):
             raise exception

     # pylint: enable=signature-differs
-    def get_pulls_from_search(self, *args, **kwargs) -> PullRequests:
+    def get_pulls_from_search(self, *args, **kwargs) -> PullRequests:  # type: ignore
         """The search api returns actually issues, so we need to fetch PullRequests"""
         issues = self.search_issues(*args, **kwargs)
         repos = {}
@@ -168,7 +175,7 @@ class GitHub(github.Github):
             self.dump(user, prfd)  # type: ignore
         return user

-    def _get_cached(self, path: Path):
+    def _get_cached(self, path: Path):  # type: ignore
         with open(path, "rb") as ob_fd:
             return self.load(ob_fd)  # type: ignore

@@ -190,11 +197,11 @@ class GitHub(github.Github):
             return False, cached_obj

     @property
-    def cache_path(self):
+    def cache_path(self) -> Path:
         return self._cache_path

     @cache_path.setter
-    def cache_path(self, value: str):
+    def cache_path(self, value: str) -> None:
         self._cache_path = Path(value)
         if self._cache_path.exists():
             assert self._cache_path.is_dir()
@@ -208,5 +215,6 @@ class GitHub(github.Github):
         return self._retries

     @retries.setter
-    def retries(self, value: int):
+    def retries(self, value: int) -> None:
+        assert isinstance(value, int)
         self._retries = value
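[Editor's note, not part of the commit] The "import X as X" spelling above looks redundant (pylint calls it useless-import-alias, hence the disable/enable pair), but mypy's strict mode implies --no-implicit-reexport: names merely imported into a module are treated as private, while the self-alias (or an __all__ entry) marks them as deliberate re-exports. A hypothetical two-module sketch:

    # helpers.py: re-export Path on purpose
    from pathlib import Path as Path

    # client.py: allowed under --no-implicit-reexport only because of the alias
    from helpers import Path
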
@@ -7,6 +7,7 @@ import logging
 import os
 import subprocess
 import sys
+from typing import List, Tuple

 from github import Github

@@ -87,8 +88,10 @@ def get_env_for_runner(build_path, repo_path, result_path, work_path):
     return my_env


-def process_results(result_folder):
-    test_results = []
+def process_results(
+    result_folder: str,
+) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
+    test_results = []  # type: List[Tuple[str, str]]
     additional_files = []
     # Just upload all files from result_folder.
     # If task provides processed results, then it's responsible for content of result_folder.
@@ -115,7 +118,7 @@ def process_results(result_folder):
     results_path = os.path.join(result_folder, "test_results.tsv")
     if os.path.exists(results_path):
         with open(results_path, "r", encoding="utf-8") as results_file:
-            test_results = list(csv.reader(results_file, delimiter="\t"))
+            test_results = list(csv.reader(results_file, delimiter="\t"))  # type: ignore
     if len(test_results) == 0:
         return "error", "Empty test_results.tsv", test_results, additional_files

@@ -153,8 +156,8 @@ if __name__ == "__main__":
     validate_bugix_check = args.validate_bugfix

     if "RUN_BY_HASH_NUM" in os.environ:
-        run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM"))
-        run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL"))
+        run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM", "0"))
+        run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL", "0"))
         check_name_with_group = (
             check_name + f" [{run_by_hash_num + 1}/{run_by_hash_total}]"
         )
@@ -7,9 +7,9 @@ import sys

 import argparse

-import boto3
+import boto3  # type: ignore
+import requests  # type: ignore
 from github import Github
-import requests

 from env_helper import REPO_COPY, TEMP_PATH, S3_BUILDS_BUCKET, S3_DOWNLOAD
 from stopwatch import Stopwatch
@@ -2,7 +2,7 @@
 import json
 import logging
 import os
-from typing import Set
+from typing import Dict, Set, Union

 from unidiff import PatchSet  # type: ignore

@@ -16,6 +16,7 @@ from env_helper import (

 FORCE_TESTS_LABEL = "force tests"
 SKIP_MERGEABLE_CHECK_LABEL = "skip mergeable check"
+NeedsDataType = Dict[str, Dict[str, Union[str, Dict[str, str]]]]

 DIFF_IN_DOCUMENTATION_EXT = [
     ".html",
@@ -146,7 +147,7 @@ class PRInfo:
             self.body = github_event["pull_request"]["body"]
             self.labels = {
                 label["name"] for label in github_event["pull_request"]["labels"]
-            }
+            }  # type: Set[str]

             self.user_login = github_event["pull_request"]["user"]["login"]
             self.user_orgs = set([])
@@ -178,7 +179,7 @@ class PRInfo:
             if pull_request is None or pull_request["state"] == "closed":
                 # it's merged PR to master
                 self.number = 0
-                self.labels = {}
+                self.labels = set()
                 self.pr_html_url = f"{repo_prefix}/commits/{ref}"
                 self.base_ref = ref
                 self.base_name = self.repo_full_name
@@ -228,7 +229,7 @@ class PRInfo:
             print(json.dumps(github_event, sort_keys=True, indent=4))
             self.sha = os.getenv("GITHUB_SHA")
             self.number = 0
-            self.labels = {}
+            self.labels = set()
             repo_prefix = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
             self.task_url = GITHUB_RUN_URL
             self.commit_html_url = f"{repo_prefix}/commits/{self.sha}"
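[Editor's note, not part of the commit] The self.labels fix above is the kind of plain bug strict typing surfaces: a bare {} literal is an empty dict, so the attribute silently switched between Set[str] and Dict on different code paths. A one-liner sketch:

    labels = {}       # type(labels) is dict, not set
    labels = set()    # the only way to write an empty set
    labels = {"pr"}   # non-empty set literals are unambiguous
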
@@ -5,7 +5,7 @@ import logging
 import os
 import re
 from collections import namedtuple
-from typing import Dict, List, Tuple
+from typing import Dict, List, Optional, Tuple

 from artifactory import ArtifactorySaaSPath  # type: ignore
 from build_download_helper import download_build_with_progress
@@ -14,7 +14,7 @@ from git_helper import TAG_REGEXP, commit, removeprefix, removesuffix


 # Necessary ENV variables
-def getenv(name: str, default: str = None):
+def getenv(name: str, default: Optional[str] = None) -> str:
     env = os.getenv(name, default)
     if env is not None:
         return env
@@ -62,7 +62,7 @@ class Packages:
             raise ValueError(f"{deb_pkg} not in {self.deb}")
         return removesuffix(deb_pkg, ".deb").split("_")[-1]

-    def replace_with_fallback(self, name: str):
+    def replace_with_fallback(self, name: str) -> None:
         if name.endswith(".deb"):
             suffix = self.deb.pop(name)
             self.deb[self.fallback_to_all(name)] = self.fallback_to_all(suffix)
@@ -80,7 +80,7 @@ class Packages:
         return os.path.join(TEMP_PATH, package_file)

     @staticmethod
-    def fallback_to_all(url_or_name: str):
+    def fallback_to_all(url_or_name: str) -> str:
         """Until July 2022 we had clickhouse-server and clickhouse-client with
         arch 'all'"""
         # deb
@@ -111,7 +111,7 @@ class S3:
         self.force_download = force_download
         self.packages = Packages(version)

-    def download_package(self, package_file: str, s3_path_suffix: str):
+    def download_package(self, package_file: str, s3_path_suffix: str) -> None:
         path = Packages.path(package_file)
         fallback_path = Packages.fallback_to_all(path)
         if not self.force_download and (
@@ -186,7 +186,12 @@ class Release:

 class Artifactory:
     def __init__(
-        self, url: str, release: str, deb_repo="deb", rpm_repo="rpm", tgz_repo="tgz"
+        self,
+        url: str,
+        release: str,
+        deb_repo: str = "deb",
+        rpm_repo: str = "rpm",
+        tgz_repo: str = "tgz",
     ):
         self._url = url
         self._release = release
@@ -196,7 +201,7 @@ class Artifactory:
         # check the credentials ENVs for early exit
         self.__path_helper("_deb", "")

-    def deploy_deb(self, packages: Packages):
+    def deploy_deb(self, packages: Packages) -> None:
         for package_file in packages.deb:
             path = packages.path(package_file)
             dist = self._release
@@ -212,13 +217,13 @@ class Artifactory:
             )
             self.deb_path(package_file).deploy_deb(path, dist, comp, arch)

-    def deploy_rpm(self, packages: Packages):
+    def deploy_rpm(self, packages: Packages) -> None:
         for package_file in packages.rpm:
             path = packages.path(package_file)
             logging.info("Deploy %s to artifactory", path)
             self.rpm_path(package_file).deploy_file(path)

-    def deploy_tgz(self, packages: Packages):
+    def deploy_tgz(self, packages: Packages) -> None:
         for package_file in packages.tgz:
             path = packages.path(package_file)
             logging.info("Deploy %s to artifactory", path)
@@ -316,19 +321,19 @@ def parse_args() -> argparse.Namespace:
     return args


-def process_deb(s3: S3, art_clients: List[Artifactory]):
+def process_deb(s3: S3, art_clients: List[Artifactory]) -> None:
     s3.download_deb()
     for art_client in art_clients:
         art_client.deploy_deb(s3.packages)


-def process_rpm(s3: S3, art_clients: List[Artifactory]):
+def process_rpm(s3: S3, art_clients: List[Artifactory]) -> None:
     s3.download_rpm()
     for art_client in art_clients:
         art_client.deploy_rpm(s3.packages)


-def process_tgz(s3: S3, art_clients: List[Artifactory]):
+def process_tgz(s3: S3, art_clients: List[Artifactory]) -> None:
     s3.download_tgz()
     for art_client in art_clients:
         art_client.deploy_tgz(s3.packages)
@@ -11,7 +11,7 @@ On another hand, PyGithub is used for convenient getting commit's status from API


 from contextlib import contextmanager
-from typing import List, Optional
+from typing import Any, Iterator, List, Literal, Optional
 import argparse
 import logging
 import subprocess
@@ -48,7 +48,7 @@ class Repo:
         return self._url

     @url.setter
-    def url(self, protocol: str):
+    def url(self, protocol: str) -> None:
         if protocol == "ssh":
             self._url = f"git@github.com:{self}.git"
         elif protocol == "https":
@@ -68,17 +68,23 @@ class Release:
     CMAKE_PATH = get_abs_path(FILE_WITH_VERSION_PATH)
     CONTRIBUTORS_PATH = get_abs_path(GENERATED_CONTRIBUTORS)

-    def __init__(self, repo: Repo, release_commit: str, release_type: str):
+    def __init__(
+        self,
+        repo: Repo,
+        release_commit: str,
+        release_type: Literal["major", "minor", "patch"],
+    ):
         self.repo = repo
         self._release_commit = ""
         self.release_commit = release_commit
+        assert release_type in self.BIG + self.SMALL
         self.release_type = release_type
         self._git = git
         self._version = get_version_from_repo(git=self._git)
         self._release_branch = ""
         self._rollback_stack = []  # type: List[str]

-    def run(self, cmd: str, cwd: Optional[str] = None, **kwargs) -> str:
+    def run(self, cmd: str, cwd: Optional[str] = None, **kwargs: Any) -> str:
         cwd_text = ""
         if cwd:
             cwd_text = f" (CWD='{cwd}')"
@@ -153,7 +159,9 @@ class Release:

         self.check_commit_release_ready()

-    def do(self, check_dirty: bool, check_branch: bool, with_release_branch: bool):
+    def do(
+        self, check_dirty: bool, check_branch: bool, with_release_branch: bool
+    ) -> None:
         self.check_prerequisites()

         if check_dirty:
@@ -310,7 +318,7 @@ class Release:
         return self._version

     @version.setter
-    def version(self, version: ClickHouseVersion):
+    def version(self, version: ClickHouseVersion) -> None:
         if not isinstance(version, ClickHouseVersion):
             raise ValueError(f"version must be ClickHouseVersion, not {type(version)}")
         self._version = version
@@ -320,7 +328,7 @@ class Release:
         return self._release_branch

     @release_branch.setter
-    def release_branch(self, branch: str):
+    def release_branch(self, branch: str) -> None:
         self._release_branch = release_branch(branch)

     @property
@@ -328,7 +336,7 @@ class Release:
         return self._release_commit

     @release_commit.setter
-    def release_commit(self, release_commit: str):
+    def release_commit(self, release_commit: str) -> None:
         self._release_commit = commit(release_commit)

     @contextmanager
@@ -367,7 +375,7 @@ class Release:
             yield

     @contextmanager
-    def _bump_testing_version(self, helper_branch: str):
+    def _bump_testing_version(self, helper_branch: str) -> Iterator[None]:
         self.read_version()
         self.version = self.version.update(self.release_type)
         self.version.with_description(VersionType.TESTING)
@@ -387,7 +395,7 @@ class Release:
             yield

     @contextmanager
-    def _checkout(self, ref: str, with_checkout_back: bool = False):
+    def _checkout(self, ref: str, with_checkout_back: bool = False) -> Iterator[None]:
         orig_ref = self._git.branch or self._git.sha
         need_rollback = False
         if ref not in (self._git.branch, self._git.sha):
@@ -406,7 +414,7 @@ class Release:
             self.run(rollback_cmd)

     @contextmanager
-    def _create_branch(self, name: str, start_point: str = ""):
+    def _create_branch(self, name: str, start_point: str = "") -> Iterator[None]:
         self.run(f"git branch {name} {start_point}")
         rollback_cmd = f"git branch -D {name}"
         self._rollback_stack.append(rollback_cmd)
@@ -418,7 +426,7 @@ class Release:
             raise

     @contextmanager
-    def _create_gh_label(self, label: str, color_hex: str):
+    def _create_gh_label(self, label: str, color_hex: str) -> Iterator[None]:
         # API call, https://docs.github.com/en/rest/reference/issues#create-a-label
         self.run(
             f"gh api repos/{self.repo}/labels -f name={label} -f color={color_hex}"
@@ -433,7 +441,7 @@ class Release:
             raise

     @contextmanager
-    def _create_gh_release(self, as_prerelease: bool):
+    def _create_gh_release(self, as_prerelease: bool) -> Iterator[None]:
         with self._create_tag():
             # Preserve tag if version is changed
             tag = self.version.describe
@@ -468,7 +476,9 @@ class Release:
             raise

     @contextmanager
-    def _push(self, ref: str, with_rollback_on_fail: bool = True, remote_ref: str = ""):
+    def _push(
+        self, ref: str, with_rollback_on_fail: bool = True, remote_ref: str = ""
+    ) -> Iterator[None]:
         if remote_ref == "":
             remote_ref = ref

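[Editor's note, not part of the commit] The contextmanager helpers above are annotated "-> Iterator[None]" because @contextmanager is typed on the generator it decorates, not on the context manager it produces. A minimal sketch:

    from contextlib import contextmanager
    from typing import Iterator


    @contextmanager
    def scoped(name: str) -> Iterator[None]:
        print(f"enter {name}")
        try:
            yield  # the body of the with-statement runs here
        finally:
            print(f"exit {name}")


    with scoped("demo"):
        pass
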
@@ -101,7 +101,7 @@ def _format_header(header, branch_name, branch_url=None):
     result = "ClickHouse " + result
     result += " for "
     if branch_url:
-        result += '<a href="{url}">{name}</a>'.format(url=branch_url, name=branch_name)
+        result += f'<a href="{branch_url}">{branch_name}</a>'
     else:
         result += branch_name
     return result
@@ -140,9 +140,7 @@ def _get_html_url(url):
     if isinstance(url, tuple):
         href, name = url[0], _get_html_url_name(url)
         if href and name:
-            return '<a href="{href}">{name}</a>'.format(
-                href=href, name=_get_html_url_name(url)
-            )
+            return f'<a href="{href}">{_get_html_url_name(url)}</a>'
     return ""


@@ -199,13 +197,7 @@ def create_test_html_report(
                 num_fails = num_fails + 1
                 is_fail_id = 'id="fail' + str(num_fails) + '" '

-            row += (
-                "<td "
-                + is_fail_id
-                + 'style="{}">'.format(style)
-                + test_status
-                + "</td>"
-            )
+            row += f'<td {is_fail_id}style="{style}">{test_status}</td>'

             if test_time is not None:
                 row += "<td>" + test_time + "</td>"
@@ -229,8 +221,8 @@ def create_test_html_report(
         if has_test_logs and not with_raw_logs:
             headers.append("Logs")

-        headers = "".join(["<th>" + h + "</th>" for h in headers])
-        test_part = HTML_TEST_PART.format(headers=headers, rows=rows_part)
+        headers_html = "".join(["<th>" + h + "</th>" for h in headers])
+        test_part = HTML_TEST_PART.format(headers=headers_html, rows=rows_part)
     else:
         test_part = ""

@@ -317,33 +309,33 @@ def create_build_html_report(
         build_results, build_logs_urls, artifact_urls_list
     ):
         row = "<tr>"
-        row += "<td>{}</td>".format(build_result.compiler)
+        row += f"<td>{build_result.compiler}</td>"
         if build_result.build_type:
-            row += "<td>{}</td>".format(build_result.build_type)
+            row += f"<td>{build_result.build_type}</td>"
         else:
-            row += "<td>{}</td>".format("relwithdebuginfo")
+            row += "<td>relwithdebuginfo</td>"
         if build_result.sanitizer:
-            row += "<td>{}</td>".format(build_result.sanitizer)
+            row += f"<td>{build_result.sanitizer}</td>"
        else:
-            row += "<td>{}</td>".format("none")
+            row += "<td>none</td>"

-        row += "<td>{}</td>".format(build_result.libraries)
+        row += f"<td>{build_result.libraries}</td>"

         if build_result.status:
             style = _get_status_style(build_result.status)
-            row += '<td style="{}">{}</td>'.format(style, build_result.status)
+            row += f'<td style="{style}">{build_result.status}</td>'
         else:
             style = _get_status_style("error")
-            row += '<td style="{}">{}</td>'.format(style, "error")
+            row += f'<td style="{style}">error</td>'

-        row += '<td><a href="{}">link</a></td>'.format(build_log_url)
+        row += f'<td><a href="{build_log_url}">link</a></td>'

         if build_result.elapsed_seconds:
             delta = datetime.timedelta(seconds=build_result.elapsed_seconds)
         else:
-            delta = "unknown"
+            delta = "unknown"  # type: ignore

-        row += "<td>{}</td>".format(str(delta))
+        row += f"<td>{delta}</td>"

         links = ""
         link_separator = "<br/>"
@@ -355,7 +347,7 @@ def create_build_html_report(
             links += link_separator
         if links:
             links = links[: -len(link_separator)]
-        row += "<td>{}</td>".format(links)
+        row += f"<td>{links}</td>"

         row += "</tr>"
         rows += row
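[Editor's note, not part of the commit] The headers_html rename above avoids rebinding one name from List[str] to str, which mypy rejects because a variable keeps a single inferred type per scope. Sketch:

    headers = ["Test", "Status"]  # inferred as List[str]
    # headers = "".join(...)      # error: str is incompatible with List[str]
    headers_html = "".join("<th>" + h + "</th>" for h in headers)
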
@@ -112,7 +112,7 @@ def should_run_checks_for_pr(pr_info: PRInfo) -> Tuple[bool, str, str]:
     return True, "No special conditions apply", "pending"


-def check_pr_description(pr_info) -> Tuple[str, str]:
+def check_pr_description(pr_info: PRInfo) -> Tuple[str, str]:
     lines = list(
         map(lambda x: x.strip(), pr_info.body.split("\n") if pr_info.body else [])
     )
0  tests/ci/runner_token_rotation_lambda/__init__.py  Normal file
@@ -46,7 +46,7 @@ class S3Helper:
         self.host = host
         self.download_host = download_host

-    def _upload_file_to_s3(self, bucket_name, file_path, s3_path):
+    def _upload_file_to_s3(self, bucket_name: str, file_path: str, s3_path: str) -> str:
         logging.debug(
             "Start uploading %s to bucket=%s path=%s", file_path, bucket_name, s3_path
         )
@@ -110,7 +110,7 @@ class S3Helper:
         url = f"{self.download_host}/{bucket_name}/{s3_path}"
         return url.replace("+", "%2B").replace(" ", "%20")

-    def upload_test_report_to_s3(self, file_path, s3_path):
+    def upload_test_report_to_s3(self, file_path: str, s3_path: str) -> str:
         if CI:
             return self._upload_file_to_s3(S3_TEST_REPORTS_BUCKET, file_path, s3_path)
         else:
@@ -296,7 +296,7 @@ class S3Helper:
         return False

     @staticmethod
-    def copy_file_to_local(bucket_name, file_path, s3_path):
+    def copy_file_to_local(bucket_name: str, file_path: str, s3_path: str) -> str:
         local_path = os.path.abspath(
             os.path.join(RUNNER_TEMP, "s3", bucket_name, s3_path)
         )
@@ -4,6 +4,7 @@ import logging
 import subprocess
 import os
 import sys
+from typing import List, Tuple

 from github import Github

@@ -137,7 +138,7 @@ if __name__ == "__main__":
     report_url = GITHUB_RUN_URL

     status = "success"
-    test_results = []
+    test_results = []  # type: List[Tuple[str, str]]
     # Try to get status message saved by the SQLancer
     try:
         # with open(
@@ -145,7 +146,7 @@ if __name__ == "__main__":
         # ) as status_f:
         #     status = status_f.readline().rstrip("\n")
         if os.path.exists(os.path.join(workspace_path, "server_crashed.log")):
-            test_results.append("Server crashed", "FAIL")
+            test_results.append(("Server crashed", "FAIL"))
         with open(
             os.path.join(workspace_path, "summary.tsv"), "r", encoding="utf-8"
         ) as summary_f:
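[Editor's note, not part of the commit] The test_results.append fix above is a genuine runtime bug the type check caught: list.append takes exactly one argument, so the old two-argument call raised TypeError on the code path where the server had crashed. Sketch:

    test_results = []  # type: list
    test_results.append(("Server crashed", "FAIL"))   # one tuple argument: OK
    # test_results.append("Server crashed", "FAIL")   # TypeError at runtime
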
@@ -5,6 +5,7 @@ import logging
 import subprocess
 import os
 import sys
+from typing import List, Tuple

 from github import Github

@@ -44,8 +45,10 @@ def get_run_command(
     return cmd


-def process_results(result_folder, server_log_path, run_log_path):
-    test_results = []
+def process_results(
+    result_folder: str, server_log_path: str, run_log_path: str
+) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
+    test_results = []  # type: List[Tuple[str, str]]
     additional_files = []
     # Just upload all files from result_folder.
     # If task provides processed results, then it's responsible for content
@@ -89,7 +92,7 @@ def process_results(result_folder, server_log_path, run_log_path):

     results_path = os.path.join(result_folder, "test_results.tsv")
     with open(results_path, "r", encoding="utf-8") as results_file:
-        test_results = list(csv.reader(results_file, delimiter="\t"))
+        test_results = list(csv.reader(results_file, delimiter="\t"))  # type: ignore
     if len(test_results) == 0:
         raise Exception("Empty results")

@@ -1,11 +1,13 @@
 #!/usr/bin/env python3
 import argparse
+import atexit
 import csv
 import logging
 import os
 import subprocess
 import sys
-import atexit
+from typing import List, Tuple


 from clickhouse_helper import (
@@ -36,8 +38,10 @@ GIT_PREFIX = (  # All commits to remote are done as robot-clickhouse
 )


-def process_result(result_folder):
-    test_results = []
+def process_result(
+    result_folder: str,
+) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
+    test_results = []  # type: List[Tuple[str, str]]
     additional_files = []
     # Just upload all files from result_folder.
     # If task provides processed results, then it's responsible
@@ -64,7 +68,7 @@ def process_result(result_folder):
     try:
         results_path = os.path.join(result_folder, "test_results.tsv")
         with open(results_path, "r", encoding="utf-8") as fd:
-            test_results = list(csv.reader(fd, delimiter="\t"))
+            test_results = list(csv.reader(fd, delimiter="\t"))  # type: ignore
         if len(test_results) == 0:
             raise Exception("Empty results")

@@ -88,7 +92,7 @@ def parse_args():
     return parser.parse_args()


-def checkout_head(pr_info: PRInfo):
+def checkout_head(pr_info: PRInfo) -> None:
     # It works ONLY for PRs, and only over ssh, so either
     # ROBOT_CLICKHOUSE_SSH_KEY should be set or ssh-agent should work
     assert pr_info.number
@@ -108,7 +112,7 @@ def checkout_head(pr_info: PRInfo):
     git_runner(f"git checkout -f head-{pr_info.head_ref}")


-def commit_push_staged(pr_info: PRInfo):
+def commit_push_staged(pr_info: PRInfo) -> None:
     # It works ONLY for PRs, and only over ssh, so either
     # ROBOT_CLICKHOUSE_SSH_KEY should be set or ssh-agent should work
     assert pr_info.number
0  tests/ci/team_keys_lambda/__init__.py  Normal file
@@ -14,7 +14,7 @@ import boto3  # type: ignore
 class Keys(set):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.updated_at = 0
+        self.updated_at = 0.0

     def update_now(self):
         self.updated_at = datetime.now().timestamp()
@@ -88,7 +88,7 @@ def get_token_from_aws() -> str:
     )
     get_secret_value_response = client.get_secret_value(SecretId=secret_name)
     data = json.loads(get_secret_value_response["SecretString"])
-    return data["clickhouse_robot_token"]
+    return data["clickhouse_robot_token"]  # type: ignore


 def main(token: str, org: str, team_slug: str) -> str:
|
|||||||
from subprocess import Popen, PIPE, STDOUT
|
from subprocess import Popen, PIPE, STDOUT
|
||||||
from threading import Thread
|
from threading import Thread
|
||||||
from time import sleep
|
from time import sleep
|
||||||
|
from typing import Optional
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
@ -18,7 +19,7 @@ class TeePopen:
|
|||||||
self.command = command
|
self.command = command
|
||||||
self.log_file = log_file
|
self.log_file = log_file
|
||||||
self.env = env
|
self.env = env
|
||||||
self.process = None
|
self._process = None # type: Optional[Popen]
|
||||||
self.timeout = timeout
|
self.timeout = timeout
|
||||||
|
|
||||||
def _check_timeout(self):
|
def _check_timeout(self):
|
||||||
@ -51,7 +52,7 @@ class TeePopen:
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, t, value, traceback):
|
def __exit__(self, t, value, traceback):
|
||||||
for line in self.process.stdout:
|
for line in self.process.stdout: # type: ignore
|
||||||
sys.stdout.write(line)
|
sys.stdout.write(line)
|
||||||
self.log_file.write(line)
|
self.log_file.write(line)
|
||||||
|
|
||||||
@ -59,8 +60,18 @@ class TeePopen:
|
|||||||
self.log_file.close()
|
self.log_file.close()
|
||||||
|
|
||||||
def wait(self):
|
def wait(self):
|
||||||
for line in self.process.stdout:
|
for line in self.process.stdout: # type: ignore
|
||||||
sys.stdout.write(line)
|
sys.stdout.write(line)
|
||||||
self.log_file.write(line)
|
self.log_file.write(line)
|
||||||
|
|
||||||
return self.process.wait()
|
return self.process.wait()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def process(self) -> Popen:
|
||||||
|
if self._process is not None:
|
||||||
|
return self._process
|
||||||
|
raise AttributeError("process is not created yet")
|
||||||
|
|
||||||
|
@process.setter
|
||||||
|
def process(self, process: Popen) -> None:
|
||||||
|
self._process = process
|
||||||
|
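[Editor's note, not part of the commit] The new process property above is a common pattern for attributes that start as None: the getter narrows Optional[Popen] to Popen in one place, so call sites need no None checks. A generic sketch of the same pattern:

    from typing import Optional


    class Holder:
        def __init__(self) -> None:
            self._value = None  # type: Optional[int]

        @property
        def value(self) -> int:
            if self._value is not None:
                return self._value
            raise AttributeError("value is not set yet")

        @value.setter
        def value(self, value: int) -> None:
            self._value = value


    h = Holder()
    h.value = 42
    print(h.value)  # 42
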
0  tests/ci/terminate_runner_lambda/__init__.py  Normal file
@@ -1,17 +1,18 @@
 #!/usr/bin/env python3

-import requests
 import argparse
-import jwt
 import sys
 import json
 import time
 from collections import namedtuple
+from typing import Any, Dict, List, Tuple

+import boto3  # type: ignore
+import requests  # type: ignore
+import jwt


-def get_key_and_app_from_aws():
-    import boto3
-
+def get_key_and_app_from_aws() -> Tuple[str, int]:
     secret_name = "clickhouse_github_secret_key"
     session = boto3.session.Session()
     client = session.client(
@@ -22,7 +23,7 @@ def get_key_and_app_from_aws():
     return data["clickhouse-app-key"], int(data["clickhouse-app-id"])


-def get_installation_id(jwt_token):
+def get_installation_id(jwt_token: str) -> int:
     headers = {
         "Authorization": f"Bearer {jwt_token}",
         "Accept": "application/vnd.github.v3+json",
@@ -33,10 +34,12 @@ def get_installation_id(jwt_token):
     for installation in data:
         if installation["account"]["login"] == "ClickHouse":
             installation_id = installation["id"]
-            return installation_id
+            break
+
+    return installation_id  # type: ignore


-def get_access_token(jwt_token, installation_id):
+def get_access_token(jwt_token: str, installation_id: int) -> str:
     headers = {
         "Authorization": f"Bearer {jwt_token}",
         "Accept": "application/vnd.github.v3+json",
@@ -47,15 +50,16 @@ def get_access_token(jwt_token, installation_id):
     )
     response.raise_for_status()
     data = response.json()
-    return data["token"]
+    return data["token"]  # type: ignore


 RunnerDescription = namedtuple(
     "RunnerDescription", ["id", "name", "tags", "offline", "busy"]
 )
+RunnerDescriptions = List[RunnerDescription]


-def list_runners(access_token):
+def list_runners(access_token: str) -> RunnerDescriptions:
     headers = {
         "Authorization": f"token {access_token}",
         "Accept": "application/vnd.github.v3+json",
@@ -94,9 +98,9 @@ def list_runners(access_token):
     return result


-def how_many_instances_to_kill(event_data):
+def how_many_instances_to_kill(event_data: dict) -> Dict[str, int]:
     data_array = event_data["CapacityToTerminate"]
-    to_kill_by_zone = {}
+    to_kill_by_zone = {}  # type: Dict[str, int]
     for av_zone in data_array:
         zone_name = av_zone["AvailabilityZone"]
         to_kill = av_zone["Capacity"]
@@ -104,15 +108,16 @@ def how_many_instances_to_kill(event_data):
             to_kill_by_zone[zone_name] = 0

         to_kill_by_zone[zone_name] += to_kill

     return to_kill_by_zone


-def get_candidates_to_be_killed(event_data):
+def get_candidates_to_be_killed(event_data: dict) -> Dict[str, List[str]]:
     data_array = event_data["Instances"]
-    instances_by_zone = {}
+    instances_by_zone = {}  # type: Dict[str, List[str]]
     for instance in data_array:
         zone_name = instance["AvailabilityZone"]
-        instance_id = instance["InstanceId"]
+        instance_id = instance["InstanceId"]  # type: str
         if zone_name not in instances_by_zone:
             instances_by_zone[zone_name] = []
         instances_by_zone[zone_name].append(instance_id)
@@ -120,7 +125,7 @@ def get_candidates_to_be_killed(event_data):
     return instances_by_zone


-def delete_runner(access_token, runner):
+def delete_runner(access_token: str, runner: RunnerDescription) -> bool:
     headers = {
         "Authorization": f"token {access_token}",
         "Accept": "application/vnd.github.v3+json",
@@ -134,10 +139,12 @@ def delete_runner(access_token, runner):
     print(
         f"Response code deleting {runner.name} with id {runner.id} is {response.status_code}"
     )
-    return response.status_code == 204
+    return bool(response.status_code == 204)


-def main(github_secret_key, github_app_id, event):
+def main(
+    github_secret_key: str, github_app_id: int, event: dict
+) -> Dict[str, List[str]]:
     print("Got event", json.dumps(event, sort_keys=True, indent=4))
     to_kill_by_zone = how_many_instances_to_kill(event)
     instances_by_zone = get_candidates_to_be_killed(event)
@@ -156,17 +163,16 @@ def main(github_secret_key, github_app_id, event):

     to_delete_runners = []
     instances_to_kill = []
-    for zone in to_kill_by_zone:
-        num_to_kill = to_kill_by_zone[zone]
+    for zone, num_to_kill in to_kill_by_zone.items():
         candidates = instances_by_zone[zone]
         if num_to_kill > len(candidates):
             raise Exception(
                 f"Required to kill {num_to_kill}, but have only {len(candidates)} candidates in AV {zone}"
             )

-        delete_for_av = []
+        delete_for_av = []  # type: RunnerDescriptions
         for candidate in candidates:
-            if candidate not in set([runner.name for runner in runners]):
+            if candidate not in set(runner.name for runner in runners):
                 print(
                     f"Candidate {candidate} was not in runners list, simply delete it"
                 )
@@ -214,7 +220,7 @@ def main(github_secret_key, github_app_id, event):
     return response


-def handler(event, context):
+def handler(event: dict, context: Any) -> Dict[str, List[str]]:
     private_key, app_id = get_key_and_app_from_aws()
     return main(private_key, app_id, event)

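[Editor's note, not part of the commit] Several "# type: ignore" comments above exist because json.loads() and response.json() return Any, and strict mode warns when a function declared to return str hands back an Any-typed subscript. typing.cast is an equivalent, slightly more explicit alternative; a sketch with illustrative names:

    import json
    from typing import cast


    def get_token(payload: str) -> str:
        data = json.loads(payload)  # typed as Any
        return cast(str, data["token"])


    print(get_token('{"token": "abc"}'))  # abc
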
@@ -5,6 +5,7 @@ import os
 import sys
 import subprocess
 import atexit
+from typing import List, Tuple

 from github import Github

@@ -37,14 +38,16 @@ def get_test_name(line):
     raise Exception(f"No test name in line '{line}'")


-def process_result(result_folder):
+def process_results(
+    result_folder: str,
+) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
     OK_SIGN = "OK ]"
     FAILED_SIGN = "FAILED ]"
     SEGFAULT = "Segmentation fault"
     SIGNAL = "received signal SIG"
     PASSED = "PASSED"

-    summary = []
+    summary = []  # type: List[Tuple[str, str]]
     total_counter = 0
     failed_counter = 0
     result_log_path = f"{result_folder}/test_result.txt"
@@ -151,7 +154,7 @@ if __name__ == "__main__":
     subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)

     s3_helper = S3Helper()
-    state, description, test_results, additional_logs = process_result(test_output)
+    state, description, test_results, additional_logs = process_results(test_output)

     ch_helper = ClickHouseHelper()
     mark_flaky_tests(ch_helper, check_name, test_results)
@@ -1,6 +1,7 @@
 import os
 import logging
-import ast
+from typing import List, Tuple

 from env_helper import (
     GITHUB_JOB_URL,
@@ -9,35 +10,15 @@ from env_helper import (
     GITHUB_SERVER_URL,
 )
 from report import ReportColorTheme, create_test_html_report
+from s3_helper import S3Helper


 def process_logs(
-    s3_client, additional_logs, s3_path_prefix, test_results, with_raw_logs
-):
+    s3_client: S3Helper, additional_logs: List[str], s3_path_prefix: str
+) -> List[str]:
     logging.info("Upload files to s3 %s", additional_logs)

-    processed_logs = {}
-    # Firstly convert paths of logs from test_results to urls to s3.
-    for test_result in test_results:
-        if len(test_result) <= 3 or with_raw_logs:
-            continue
-
-        # Convert from string repr of list to list.
-        test_log_paths = ast.literal_eval(test_result[3])
-        test_log_urls = []
-        for log_path in test_log_paths:
-            if log_path in processed_logs:
-                test_log_urls.append(processed_logs[log_path])
-            elif log_path:
-                url = s3_client.upload_test_report_to_s3(
-                    log_path, s3_path_prefix + "/" + os.path.basename(log_path)
-                )
-                test_log_urls.append(url)
-                processed_logs[log_path] = url
-
-        test_result[3] = test_log_urls
-
-    additional_urls = []
+    additional_urls = []  # type: List[str]
     for log_path in additional_logs:
         if log_path:
             additional_urls.append(
@@ -50,21 +31,18 @@ def process_logs(


 def upload_results(
-    s3_client,
-    pr_number,
-    commit_sha,
-    test_results,
-    additional_files,
-    check_name,
-    with_raw_logs=True,
-    statuscolors=None,
-):
+    s3_client: S3Helper,
+    pr_number: int,
+    commit_sha: str,
+    test_results: List[Tuple[str, str]],
+    additional_files: List[str],
+    check_name: str,
+    with_raw_logs: bool = True,
+) -> str:
     s3_path_prefix = f"{pr_number}/{commit_sha}/" + check_name.lower().replace(
         " ", "_"
     ).replace("(", "_").replace(")", "_").replace(",", "_")
-    additional_urls = process_logs(
-        s3_client, additional_files, s3_path_prefix, test_results, with_raw_logs
-    )
+    additional_urls = process_logs(s3_client, additional_files, s3_path_prefix)

     branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
     branch_name = "master"
@ -2,9 +2,9 @@
|
|||||||
import logging
|
import logging
|
||||||
import os.path as p
|
import os.path as p
|
||||||
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, ArgumentTypeError
|
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, ArgumentTypeError
|
||||||
from typing import Dict, List, Optional, Tuple, Union
|
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
|
||||||
|
|
||||||
from git_helper import TWEAK, Git, get_tags, git_runner, removeprefix
|
from git_helper import TWEAK, Git as Git, get_tags, git_runner, removeprefix
|
||||||
|
|
||||||
FILE_WITH_VERSION_PATH = "cmake/autogenerated_versions.txt"
|
FILE_WITH_VERSION_PATH = "cmake/autogenerated_versions.txt"
|
||||||
CHANGELOG_IN_PATH = "debian/changelog.in"
|
CHANGELOG_IN_PATH = "debian/changelog.in"
|
||||||
@ -45,7 +45,7 @@ class ClickHouseVersion:
|
|||||||
patch: Union[int, str],
|
patch: Union[int, str],
|
||||||
revision: Union[int, str],
|
revision: Union[int, str],
|
||||||
git: Optional[Git],
|
git: Optional[Git],
|
||||||
tweak: str = None,
|
tweak: Optional[str] = None,
|
||||||
):
|
):
|
||||||
self._major = int(major)
|
self._major = int(major)
|
||||||
self._minor = int(minor)
|
self._minor = int(minor)
|
||||||
@ -59,10 +59,15 @@ class ClickHouseVersion:
|
|||||||
self._tweak = self._git.tweak
|
self._tweak = self._git.tweak
|
||||||
self._describe = ""
|
self._describe = ""
|
||||||
|
|
||||||
def update(self, part: str) -> "ClickHouseVersion":
|
def update(self, part: Literal["major", "minor", "patch"]) -> "ClickHouseVersion":
|
||||||
"""If part is valid, returns a new version"""
|
"""If part is valid, returns a new version"""
|
||||||
method = getattr(self, f"{part}_update")
|
if part == "major":
|
||||||
return method()
|
return self.major_update()
|
||||||
|
if part == "minor":
|
||||||
|
return self.minor_update()
|
||||||
|
if part == "patch":
|
||||||
|
return self.patch_update()
|
||||||
|
raise KeyError(f"wrong part {part} is used")
|
||||||
|
|
||||||
def major_update(self) -> "ClickHouseVersion":
|
def major_update(self) -> "ClickHouseVersion":
|
||||||
if self._git is not None:
|
if self._git is not None:
|
||||||
@ -139,10 +144,10 @@ class ClickHouseVersion:
|
|||||||
raise ValueError(f"version type {version_type} not in {VersionType.VALID}")
|
raise ValueError(f"version type {version_type} not in {VersionType.VALID}")
|
||||||
self._describe = f"v{self.string}-{version_type}"
|
self._describe = f"v{self.string}-{version_type}"
|
||||||
|
|
||||||
def __eq__(self, other) -> bool:
|
def __eq__(self, other: Any) -> bool:
|
||||||
if not isinstance(self, type(other)):
|
if not isinstance(self, type(other)):
|
||||||
return NotImplemented
|
return NotImplemented
|
||||||
return (
|
return bool(
|
||||||
self.major == other.major
|
self.major == other.major
|
||||||
and self.minor == other.minor
|
and self.minor == other.minor
|
||||||
and self.patch == other.patch
|
and self.patch == other.patch
|
||||||
@ -170,7 +175,7 @@ class VersionType:
|
|||||||
VALID = (TESTING, PRESTABLE, STABLE, LTS)
|
VALID = (TESTING, PRESTABLE, STABLE, LTS)
|
||||||
|
|
||||||
|
|
||||||
def validate_version(version: str):
|
def validate_version(version: str) -> None:
|
||||||
parts = version.split(".")
|
parts = version.split(".")
|
||||||
if len(parts) != 4:
|
if len(parts) != 4:
|
||||||
raise ValueError(f"{version} does not contain 4 parts")
|
raise ValueError(f"{version} does not contain 4 parts")
|
||||||
@@ -259,7 +264,7 @@ def get_tagged_versions() -> List[ClickHouseVersion]:
 def update_cmake_version(
     version: ClickHouseVersion,
     versions_path: str = FILE_WITH_VERSION_PATH,
-):
+) -> None:
     path_to_file = get_abs_path(versions_path)
     with open(path_to_file, "w", encoding="utf-8") as f:
         f.write(VERSIONS_TEMPLATE.format_map(version.as_dict()))
@@ -269,7 +274,7 @@ def update_contributors(
     relative_contributors_path: str = GENERATED_CONTRIBUTORS,
     force: bool = False,
     raise_error: bool = False,
-):
+) -> None:
     # Check if we have shallow checkout by comparing number of lines
     # '--is-shallow-repository' is in git since 2.15, 2017-10-30
     if git_runner.run("git rev-parse --is-shallow-repository") == "true" and not force:
@@ -17,9 +17,9 @@ class TestFunctions(unittest.TestCase):
             ("v1.1.1.2-testing", vh.get_version_from_string("1.1.1.2")),
             ("refs/tags/v1.1.1.2-testing", vh.get_version_from_string("1.1.1.2")),
         )
-        for case in cases:
-            version = vh.version_arg(case[0])
-            self.assertEqual(case[1], version)
+        for test_case in cases:
+            version = vh.version_arg(test_case[0])
+            self.assertEqual(test_case[1], version)
         error_cases = (
             "0.0.0",
             "1.1.1.a",
@@ -28,6 +28,6 @@ class TestFunctions(unittest.TestCase):
             "v1.1.1.2-testin",
             "refs/tags/v1.1.1.2-testin",
         )
-        for case in error_cases:
+        for error_case in error_cases:
             with self.assertRaises(ArgumentTypeError):
-                version = vh.version_arg(case[0])
+                version = vh.version_arg(error_case[0])
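The renames in these two test hunks are not cosmetic: `cases` yields tuples while `error_cases` yields strings, so reusing the single name `case` for both loops would redefine one variable with two incompatible types, which mypy rejects unless `--allow-redefinition` is on. Giving each loop its own name keeps one type per variable, as in this sketch:

cases = (("v1.1.1.2-testing", "1.1.1.2"),)
error_cases = ("0.0.0", "1.1.1.a")

for test_case in cases:  # inferred as a tuple of two strings
    print(test_case[0])

for error_case in error_cases:  # inferred as str
    print(error_case)

# A shared `case` variable would be a tuple in the first loop and a str in
# the second: an incompatible redefinition under mypy's default settings.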
@@ -313,7 +313,7 @@ def check_suspicious_changed_files(changed_files):
     return False


-def approve_run(workflow_description: WorkflowDescription, token):
+def approve_run(workflow_description: WorkflowDescription, token: str) -> None:
     url = f"{workflow_description.api_url}/approve"
     _exec_post_with_retry(url, token)

@@ -391,7 +391,7 @@ def rerun_workflow(workflow_description, token):


 def check_workflow_completed(
-    event_data, workflow_description: WorkflowDescription, token: str
+    event_data: dict, workflow_description: WorkflowDescription, token: str
 ) -> bool:
     if workflow_description.action == "completed":
         attempt = 0
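Annotating the webhook payload as a bare `dict` is a pragmatic boundary type for data parsed from JSON; a more explicit spelling of the same idea (handler name hypothetical):

from typing import Any, Dict

def handler(event_data: Dict[str, Any]) -> bool:
    # JSON payloads have string keys and arbitrary values.
    return event_data.get("action") == "completed"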
utils/check-style/check-mypy (new executable file, 23 lines)
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+# The mypy supports pyproject.toml, but unfortunately it doesn't support it recursively
+# https://github.com/python/mypy/issues/10613
+#
+# Unless it's done, mypy only runs against tests/ci
+# Let's leave here a room for improvement and redo it when mypy will test anything else
+
+GIT_ROOT=$(git rev-parse --show-cdup)
+GIT_ROOT=${GIT_ROOT:-.}
+CONFIG="$GIT_ROOT/tests/ci/.mypy.ini"
+DIRS=("$GIT_ROOT/tests/ci/" "$GIT_ROOT/tests/ci/"*/)
+tmp=$(mktemp)
+for dir in "${DIRS[@]}"; do
+    if ! compgen -G "$dir"/*.py > /dev/null; then
+        continue
+    fi
+    if ! mypy --config-file="$CONFIG" --sqlite-cache "$dir"/*.py > "$tmp" 2>&1; then
+        echo "Errors while processing $dir":
+        cat "$tmp"
+    fi
+done
+rm -rf "$tmp"
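The script can be run from anywhere inside a checkout: the `git rev-parse --show-cdup` prefix walks back to the repository root, and the `compgen -G` guard skips directories that contain no Python files. Its settings come from `tests/ci/.mypy.ini`, which is not part of this diff; a plausible strict configuration might look like the following sketch (contents hypothetical, the real file may differ):

; .mypy.ini (hypothetical contents)
[mypy]
disallow_untyped_defs = True
no_implicit_optional = True
no_implicit_reexport = True
warn_return_any = True
ignore_missing_imports = True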