Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-12-17 20:02:05 +00:00)

Merge pull request #43132 from ClickHouse/strict-typing-ci
Enable strict typing check in tests/ci

This commit is contained in: 8897aa7fe8
@@ -17,7 +17,7 @@ RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
python3-pip \
shellcheck \
yamllint \
&& pip3 install black==22.8.0 boto3 codespell==2.2.1 dohq-artifactory PyGithub unidiff pylint==2.6.2 \
&& pip3 install black==22.8.0 boto3 codespell==2.2.1 dohq-artifactory mypy PyGithub unidiff pylint==2.6.2 \
&& apt-get clean \
&& rm -rf /root/.cache/pip
@@ -11,17 +11,19 @@ def process_result(result_folder):
description = ""
test_results = []
checks = (
("header duplicates", "duplicate_output.txt"),
("shellcheck", "shellcheck_output.txt"),
("style", "style_output.txt"),
("black", "black_output.txt"),
("typos", "typos_output.txt"),
("whitespaces", "whitespaces_output.txt"),
("workflows", "workflows_output.txt"),
("doc typos", "doc_spell_output.txt"),
"duplicate includes",
"shellcheck",
"style",
"black",
"mypy",
"typos",
"whitespaces",
"workflows",
"docs spelling",
)

for name, out_file in checks:
for name in checks:
out_file = name.replace(" ", "_") + "_output.txt"
full_path = os.path.join(result_folder, out_file)
if not os.path.exists(full_path):
logging.info("No %s check log on path %s", name, full_path)
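The new tuple of plain check names relies on a naming convention: the log file is now derived from the check name instead of being spelled out per check. For example (a derivation, not code from the PR):

    >>> "docs spelling".replace(" ", "_") + "_output.txt"
    'docs_spelling_output.txt'

This is why the tee targets in the shell script below are renamed to match (duplicate_output.txt becomes duplicate_includes_output.txt, doc_spell_output.txt becomes docs_spelling_output.txt).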
@@ -4,15 +4,17 @@
cd /ClickHouse/utils/check-style || echo -e "failure\tRepo not found" > /test_output/check_status.tsv
echo "Check duplicates" | ts
./check-duplicate-includes.sh |& tee /test_output/duplicate_output.txt
./check-duplicate-includes.sh |& tee /test_output/duplicate_includes_output.txt
echo "Check style" | ts
./check-style -n |& tee /test_output/style_output.txt
echo "Check python formatting with black" | ts
./check-black -n |& tee /test_output/black_output.txt
echo "Check python type hinting with mypy" | ts
./check-mypy -n |& tee /test_output/mypy_output.txt
echo "Check typos" | ts
./check-typos |& tee /test_output/typos_output.txt
echo "Check docs spelling" | ts
./check-doc-aspell |& tee /test_output/doc_spell_output.txt
./check-doc-aspell |& tee /test_output/docs_spelling_output.txt
echo "Check whitespaces" | ts
./check-whitespaces -n |& tee /test_output/whitespaces_output.txt
echo "Check workflows" | ts
tests/ci/.mypy.ini (new file, 16 lines)
@@ -0,0 +1,16 @@
[mypy]
warn_no_return = False
warn_unused_configs = True
disallow_subclassing_any = True
disallow_untyped_calls = False
disallow_untyped_defs = False
disallow_incomplete_defs = True
check_untyped_defs = True
disallow_untyped_decorators = True
no_implicit_optional = True
warn_redundant_casts = True
warn_unused_ignores = True
warn_return_any = True
no_implicit_reexport = True
strict_equality = True
strict_concatenate = True
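To give a sense of what this configuration enforces (a hand-written illustration, not code from the PR), two of the stricter options behave as follows when mypy runs with this file:

    from typing import Optional

    def greet(name: str = None) -> str:          # rejected by no_implicit_optional:
        return "hello " + (name or "world")      # a None default requires Optional[str]

    def build_url(cfg: dict) -> str:
        return cfg["url"]                        # flagged by warn_return_any: values of an untyped dict are Any

    def greet_ok(name: Optional[str] = None) -> str:  # accepted
        return "hello " + (name or "world")

The second pattern is the reason many dictionary lookups in this PR gain "# type: ignore" comments: the CI configuration objects are plain untyped dicts, so anything read from them is Any.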
@@ -121,7 +121,7 @@ def check_for_success_run(
s3_prefix: str,
build_name: str,
build_config: BuildConfig,
):
) -> None:
logged_prefix = os.path.join(S3_BUILDS_BUCKET, s3_prefix)
logging.info("Checking for artifacts in %s", logged_prefix)
try:
@@ -174,7 +174,7 @@ def create_json_artifact(
build_config: BuildConfig,
elapsed: int,
success: bool,
):
) -> None:
subprocess.check_call(
f"echo 'BUILD_URLS=build_urls_{build_name}' >> $GITHUB_ENV", shell=True
)
@@ -218,7 +218,7 @@ def upload_master_static_binaries(
build_config: BuildConfig,
s3_helper: S3Helper,
build_output_path: str,
):
) -> None:
"""Upload binary artifacts to a static S3 links"""
static_binary_name = build_config.get("static_binary_name", False)
if pr_info.number != 0:
@@ -5,7 +5,7 @@ import logging
import os
import sys
import time
from typing import List, Optional
from typing import Any, List, Optional

import requests  # type: ignore

@@ -18,7 +18,7 @@ def get_with_retries(
url: str,
retries: int = DOWNLOAD_RETRIES_COUNT,
sleep: int = 3,
**kwargs,
**kwargs: Any,
) -> requests.Response:
logging.info(
"Getting URL with %i tries and sleep %i in between: %s", retries, sleep, url
@@ -41,18 +41,18 @@ def get_with_retries(
return response


def get_build_name_for_check(check_name) -> str:
return CI_CONFIG["tests_config"][check_name]["required_build"]
def get_build_name_for_check(check_name: str) -> str:
return CI_CONFIG["tests_config"][check_name]["required_build"]  # type: ignore


def read_build_urls(build_name, reports_path) -> List[str]:
def read_build_urls(build_name: str, reports_path: str) -> List[str]:
for root, _, files in os.walk(reports_path):
for f in files:
if build_name in f:
logging.info("Found build report json %s", f)
with open(os.path.join(root, f), "r", encoding="utf-8") as file_handler:
build_report = json.load(file_handler)
return build_report["build_urls"]
return build_report["build_urls"]  # type: ignore
return []
@@ -19,7 +19,7 @@ from env_helper import (
from report import create_build_html_report
from s3_helper import S3Helper
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from pr_info import NeedsDataType, PRInfo
from commit_status_helper import (
get_commit,
update_mergeable_check,
@@ -28,7 +28,7 @@ from ci_config import CI_CONFIG
from rerun_helper import RerunHelper

NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH")
NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")

class BuildResult:
@@ -98,7 +98,7 @@ def get_failed_report(

def process_report(
build_report,
build_report: dict,
) -> Tuple[List[BuildResult], List[List[str]], List[str]]:
build_config = build_report["build_config"]
build_result = BuildResult(
@@ -144,16 +144,14 @@ def main():
os.makedirs(temp_path)

build_check_name = sys.argv[1]
needs_data = None
needs_data = {}  # type: NeedsDataType
required_builds = 0
if os.path.exists(NEEDS_DATA_PATH):
with open(NEEDS_DATA_PATH, "rb") as file_handler:
needs_data = json.load(file_handler)
required_builds = len(needs_data)

if needs_data is not None and all(
i["result"] == "skipped" for i in needs_data.values()
):
if needs_data and all(i["result"] == "skipped" for i in needs_data.values()):
logging.info("All builds are skipped, exiting")
sys.exit(0)

@@ -218,19 +216,21 @@ def main():
build_logs = []

for build_report in build_reports:
build_result, build_artifacts_url, build_logs_url = process_report(build_report)
logging.info(
"Got %s artifact groups for build report report", len(build_result)
_build_results, build_artifacts_url, build_logs_url = process_report(
build_report
)
build_results.extend(build_result)
logging.info(
"Got %s artifact groups for build report report", len(_build_results)
)
build_results.extend(_build_results)
build_artifacts.extend(build_artifacts_url)
build_logs.extend(build_logs_url)

for failed_job in missing_build_names:
build_result, build_artifacts_url, build_logs_url = get_failed_report(
_build_results, build_artifacts_url, build_logs_url = get_failed_report(
failed_job
)
build_results.extend(build_result)
build_results.extend(_build_results)
build_artifacts.extend(build_artifacts_url)
build_logs.extend(build_logs_url)
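The switch from needs_data = None to an empty dict annotated as NeedsDataType lets the later all(...) check drop its None guard. NeedsDataType (defined in pr_info.py further down) describes the JSON that GitHub Actions writes for the needs context; roughly, and with purely illustrative job names:

    needs_data = {
        "BuilderDebRelease": {"result": "success", "outputs": {"build_urls": "..."}},
        "BuilderDebAsan": {"result": "skipped", "outputs": {}},
    }  # shape assumed from the NeedsDataType alias; keys are hypothetical

    required_builds = len(needs_data)
    all_skipped = needs_data and all(i["result"] == "skipped" for i in needs_data.values())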
@@ -106,7 +106,7 @@ def _exec_get_with_retry(url: str, token: str) -> dict:
try:
response = requests.get(url, headers=headers)
response.raise_for_status()
return response.json()
return response.json()  # type: ignore
except Exception as ex:
print("Got exception executing request", ex)
time.sleep(i + 1)
@@ -130,8 +130,7 @@ WorkflowDescription = namedtuple(

def get_workflows_description_for_pull_request(
pull_request_event,
token,
pull_request_event: dict, token: str
) -> List[WorkflowDescription]:
head_repo = pull_request_event["head"]["repo"]["full_name"]
head_branch = pull_request_event["head"]["ref"]
@@ -193,7 +192,7 @@ def get_workflows_description_for_pull_request(

def get_workflow_description_fallback(
pull_request_event, token
pull_request_event: dict, token: str
) -> List[WorkflowDescription]:
head_repo = pull_request_event["head"]["repo"]["full_name"]
head_branch = pull_request_event["head"]["ref"]
@@ -241,7 +240,7 @@ def get_workflow_description_fallback(
WorkflowDescription(
url=wf["url"],
run_id=wf["id"],
name=workflow["name"],
name=wf["name"],
head_sha=wf["head_sha"],
status=wf["status"],
rerun_url=wf["rerun_url"],
@@ -254,7 +253,7 @@ def get_workflow_description_fallback(
return workflow_descriptions

def get_workflow_description(workflow_url, token) -> WorkflowDescription:
def get_workflow_description(workflow_url: str, token: str) -> WorkflowDescription:
workflow = _exec_get_with_retry(workflow_url, token)
return WorkflowDescription(
url=workflow["url"],
@@ -331,7 +330,7 @@ def main(event):
workflow_descriptions
or get_workflow_description_fallback(pull_request, token)
)
workflow_descriptions.sort(key=lambda x: x.run_id)
workflow_descriptions.sort(key=lambda x: x.run_id)  # type: ignore
most_recent_workflow = workflow_descriptions[-1]
if (
most_recent_workflow.status == "completed"
@@ -376,7 +375,7 @@ def main(event):
print("Not found any workflows")
return

workflow_descriptions.sort(key=lambda x: x.run_id)
workflow_descriptions.sort(key=lambda x: x.run_id)  # type: ignore
most_recent_workflow = workflow_descriptions[-1]
print("Latest workflow", most_recent_workflow)
if (
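Many of the "# type: ignore" comments in this file follow one pattern: requests ships without bundled type stubs, and this codebase imports it with "# type: ignore", so everything it returns is effectively Any. Returning that Any from a function annotated "-> dict" trips warn_return_any. A minimal sketch of the trade-off, assuming only the standard requests API:

    import requests

    def fetch(url: str) -> dict:
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        return response.json()  # type: ignore  # .json() is Any without stubs

An alternative would be wrapping the value (dict(response.json())) or a typing.cast, at the cost of a small runtime conversion; the PR opts for the ignore comment.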
@@ -92,7 +92,8 @@ Merge it only if you intend to backport changes to the target branch, otherwise
if branch_updated:
self._backported = True

def pop_prs(self, prs: PullRequests):
def pop_prs(self, prs: PullRequests) -> None:
"""the method processes all prs and pops the ReleaseBranch related prs"""
to_pop = []  # type: List[int]
for i, pr in enumerate(prs):
if self.name not in pr.head.ref:
@@ -105,14 +106,14 @@ Merge it only if you intend to backport changes to the target branch, otherwise
to_pop.append(i)
else:
logging.error(
"PR #%s doesn't head ref starting with known suffix",
"head ref of PR #%s isn't starting with known suffix",
pr.number,
)
for i in reversed(to_pop):
# Going from the tail to keep the order and pop greater index first
prs.pop(i)

def process(self, dry_run: bool):
def process(self, dry_run: bool) -> None:
if self.backported:
return
if not self.cherrypick_pr:
@@ -209,6 +210,7 @@ Merge it only if you intend to backport changes to the target branch, otherwise
self._assign_new_pr(self.cherrypick_pr)

def create_backport(self):
assert self.cherrypick_pr is not None
# Checkout the backport branch from the remote and make all changes to
# apply like they are only one cherry-pick commit on top of release
git_runner(f"{self.git_prefix} checkout -f {self.backport_branch}")
@@ -239,7 +241,7 @@ Merge it only if you intend to backport changes to the target branch, otherwise
self.backport_pr.add_to_labels(Labels.BACKPORT)
self._assign_new_pr(self.backport_pr)

def _assign_new_pr(self, new_pr: PullRequest):
def _assign_new_pr(self, new_pr: PullRequest) -> None:
"""Assign `new_pr` to author, merger and assignees of an original PR"""
# It looks there some race when multiple .add_to_assignees are executed,
# so we'll add all at once
@@ -340,7 +342,7 @@ class Backport:
)
self.error = e

def process_pr(self, pr: PullRequest):
def process_pr(self, pr: PullRequest) -> None:
pr_labels = [label.name for label in pr.labels]
if Labels.MUST_BACKPORT in pr_labels:
branches = [
@@ -403,7 +405,7 @@ class Backport:
# And check it after the running
self.mark_pr_backported(pr)

def mark_pr_backported(self, pr: PullRequest):
def mark_pr_backported(self, pr: PullRequest) -> None:
if self.dry_run:
logging.info("DRY RUN: would mark PR #%s as done", pr.number)
return
@@ -488,7 +490,8 @@ def main():

gh = GitHub(token, per_page=100)
bp = Backport(gh, args.repo, args.dry_run)
bp.gh.cache_path = str(f"{TEMP_PATH}/gh_cache")
# https://github.com/python/mypy/issues/3004
bp.gh.cache_path = f"{TEMP_PATH}/gh_cache"  # type: ignore
bp.receive_release_prs()
bp.receive_prs_for_backport()
bp.process_backports()
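The referenced mypy issue (python/mypy#3004) concerns properties whose setter accepts a different type than the getter returns, which mypy does not model; GitHub.cache_path further down is exactly that shape (getter returns Path, setter takes str), hence the ignore comment at the assignment. A reduced illustration, not code from the PR:

    from pathlib import Path

    class Cache:
        _path = Path(".")

        @property
        def cache_path(self) -> Path:
            return self._path

        @cache_path.setter
        def cache_path(self, value: str) -> None:  # setter deliberately takes str
            self._path = Path(value)

    c = Cache()
    c.cache_path = "/tmp/gh_cache"  # mypy: incompatible assignment, "Path" expected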
@@ -12,11 +12,12 @@ import json
import time
from collections import namedtuple
from datetime import datetime
from typing import Dict, List, Tuple

import jwt
import requests
import boto3
from botocore.exceptions import ClientError
import requests  # type: ignore
import boto3  # type: ignore
from botocore.exceptions import ClientError  # type: ignore

UNIVERSAL_LABEL = "universal"
RUNNER_TYPE_LABELS = [
@@ -29,8 +30,13 @@ RUNNER_TYPE_LABELS = [
"style-checker-aarch64",
]

RunnerDescription = namedtuple(
"RunnerDescription", ["id", "name", "tags", "offline", "busy"]
)
RunnerDescriptions = List[RunnerDescription]

def get_dead_runners_in_ec2(runners):

def get_dead_runners_in_ec2(runners: RunnerDescriptions) -> RunnerDescriptions:
ids = {
runner.name: runner
for runner in runners
@@ -92,7 +98,7 @@ def get_dead_runners_in_ec2(runners):
return result_to_delete

def get_lost_ec2_instances(runners):
def get_lost_ec2_instances(runners: RunnerDescriptions) -> List[dict]:
client = boto3.client("ec2")
reservations = client.describe_instances(
Filters=[{"Name": "tag-key", "Values": ["github:runner-type"]}]
@@ -130,7 +136,7 @@ def get_lost_ec2_instances(runners):
return lost_instances

def get_key_and_app_from_aws():
def get_key_and_app_from_aws() -> Tuple[str, int]:
secret_name = "clickhouse_github_secret_key"
session = boto3.session.Session()
client = session.client(
@@ -146,7 +152,7 @@ def handler(event, context):
main(private_key, app_id, True, True)

def get_installation_id(jwt_token):
def get_installation_id(jwt_token: str) -> int:
headers = {
"Authorization": f"Bearer {jwt_token}",
"Accept": "application/vnd.github.v3+json",
@@ -157,10 +163,12 @@ def get_installation_id(jwt_token):
for installation in data:
if installation["account"]["login"] == "ClickHouse":
installation_id = installation["id"]
return installation_id
break

return installation_id  # type: ignore

def get_access_token(jwt_token, installation_id):
def get_access_token(jwt_token: str, installation_id: int) -> str:
headers = {
"Authorization": f"Bearer {jwt_token}",
"Accept": "application/vnd.github.v3+json",
@@ -171,15 +179,10 @@ def get_access_token(jwt_token, installation_id):
)
response.raise_for_status()
data = response.json()
return data["token"]
return data["token"]  # type: ignore

RunnerDescription = namedtuple(
"RunnerDescription", ["id", "name", "tags", "offline", "busy"]
)

def list_runners(access_token):
def list_runners(access_token: str) -> RunnerDescriptions:
headers = {
"Authorization": f"token {access_token}",
"Accept": "application/vnd.github.v3+json",
@@ -225,8 +228,10 @@ def list_runners(access_token):
return result

def group_runners_by_tag(listed_runners):
result = {}
def group_runners_by_tag(
listed_runners: RunnerDescriptions,
) -> Dict[str, RunnerDescriptions]:
result = {}  # type: Dict[str, RunnerDescriptions]

def add_to_result(tag, runner):
if tag not in result:
@@ -248,7 +253,9 @@ def group_runners_by_tag(listed_runners):
return result

def push_metrics_to_cloudwatch(listed_runners, namespace):
def push_metrics_to_cloudwatch(
listed_runners: RunnerDescriptions, namespace: str
) -> None:
client = boto3.client("cloudwatch")
metrics_data = []
busy_runners = sum(
@@ -278,7 +285,7 @@ def push_metrics_to_cloudwatch(listed_runners, namespace):
}
)
if total_active_runners == 0:
busy_ratio = 100
busy_ratio = 100.0
else:
busy_ratio = busy_runners / total_active_runners * 100

@@ -293,7 +300,7 @@ def push_metrics_to_cloudwatch(listed_runners, namespace):
client.put_metric_data(Namespace=namespace, MetricData=metrics_data)

def delete_runner(access_token, runner):
def delete_runner(access_token: str, runner: RunnerDescription) -> bool:
headers = {
"Authorization": f"token {access_token}",
"Accept": "application/vnd.github.v3+json",
@@ -305,10 +312,15 @@ def delete_runner(access_token, runner):
)
response.raise_for_status()
print(f"Response code deleting {runner.name} is {response.status_code}")
return response.status_code == 204
return bool(response.status_code == 204)

def main(github_secret_key, github_app_id, push_to_cloudwatch, delete_offline_runners):
def main(
github_secret_key: str,
github_app_id: int,
push_to_cloudwatch: bool,
delete_offline_runners: bool,
) -> None:
payload = {
"iat": int(time.time()) - 60,
"exp": int(time.time()) + (10 * 60),
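RunnerDescription stays a collections.namedtuple, so its fields remain untyped; the new RunnerDescriptions alias mostly buys readable signatures. A fully typed equivalent would be typing.NamedTuple, sketched here only as an aside, not something this PR does:

    from typing import List, NamedTuple

    class RunnerDescription(NamedTuple):
        id: int
        name: str
        tags: List[str]
        offline: bool
        busy: bool

    RunnerDescriptions = List[RunnerDescription]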
@@ -7,14 +7,21 @@ import logging

from github import Github

from env_helper import IMAGES_PATH, REPO_COPY, S3_TEST_REPORTS_BUCKET, S3_DOWNLOAD
from stopwatch import Stopwatch
from upload_result_helper import upload_results
from s3_helper import S3Helper
from get_robot_token import get_best_robot_token
from env_helper import (
IMAGES_PATH,
REPO_COPY,
S3_DOWNLOAD,
S3_TEST_REPORTS_BUCKET,
TEMP_PATH,
)
from commit_status_helper import post_commit_status
from docker_pull_helper import get_image_with_version
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from s3_helper import S3Helper
from stopwatch import Stopwatch
from tee_popen import TeePopen
from upload_result_helper import upload_results

NAME = "Woboq Build"

@@ -33,17 +40,16 @@ if __name__ == "__main__":

stopwatch = Stopwatch()

temp_path = os.getenv("TEMP_PATH", os.path.abspath("."))

gh = Github(get_best_robot_token(), per_page=100)
pr_info = PRInfo()

if not os.path.exists(temp_path):
os.makedirs(temp_path)
if not os.path.exists(TEMP_PATH):
os.makedirs(TEMP_PATH)

docker_image = get_image_with_version(IMAGES_PATH, "clickhouse/codebrowser")
s3_helper = S3Helper()

result_path = os.path.join(temp_path, "result_path")
result_path = os.path.join(TEMP_PATH, "result_path")
if not os.path.exists(result_path):
os.makedirs(result_path)

@@ -51,7 +57,7 @@ if __name__ == "__main__":

logging.info("Going to run codebrowser: %s", run_command)

run_log_path = os.path.join(temp_path, "runlog.log")
run_log_path = os.path.join(TEMP_PATH, "runlog.log")

with TeePopen(run_command, run_log_path) as process:
retcode = process.wait()
@@ -60,7 +66,7 @@ if __name__ == "__main__":
else:
logging.info("Run failed")

subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {TEMP_PATH}", shell=True)

report_path = os.path.join(result_path, "html_report")
logging.info("Report path %s", report_path)
@@ -76,12 +82,8 @@ if __name__ == "__main__":

test_results = [(index_html, "Look at the report")]

report_url = upload_results(
s3_helper, 0, os.getenv("GITHUB_SHA"), test_results, [], NAME
)
report_url = upload_results(s3_helper, 0, pr_info.sha, test_results, [], NAME)

print(f"::notice ::Report url: {report_url}")

post_commit_status(
gh, os.getenv("GITHUB_SHA"), NAME, "Report built", "success", report_url
)
post_commit_status(gh, pr_info.sha, NAME, "Report built", "success", report_url)
@@ -17,7 +17,7 @@ RETRY = 5
CommitStatuses = List[CommitStatus]

def override_status(status: str, check_name: str, invert=False) -> str:
def override_status(status: str, check_name: str, invert: bool = False) -> str:
if CI_CONFIG["tests_config"].get(check_name, {}).get("force_tests", False):
return "success"

@@ -45,7 +45,7 @@ def get_commit(gh: Github, commit_sha: str, retry_count: int = RETRY) -> Commit:

def post_commit_status(
gh: Github, sha: str, check_name: str, description: str, state: str, report_url: str
):
) -> None:
for i in range(RETRY):
try:
commit = get_commit(gh, sha, 1)
@@ -64,7 +64,7 @@ def post_commit_status(

def post_commit_status_to_file(
file_path: str, description: str, state: str, report_url: str
):
) -> None:
if os.path.exists(file_path):
raise Exception(f'File "{file_path}" already exists!')
with open(file_path, "w", encoding="utf-8") as f:
@@ -88,21 +88,21 @@ def get_commit_filtered_statuses(commit: Commit) -> CommitStatuses:
return list(filtered.values())

def remove_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]):
def remove_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
repo = gh.get_repo(GITHUB_REPOSITORY)
pull_request = repo.get_pull(pr_info.number)
for label in labels_names:
pull_request.remove_from_labels(label)

def post_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]):
def post_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
repo = gh.get_repo(GITHUB_REPOSITORY)
pull_request = repo.get_pull(pr_info.number)
for label in labels_names:
pull_request.add_to_labels(label)

def fail_mergeable_check(commit: Commit, description: str):
def fail_mergeable_check(commit: Commit, description: str) -> None:
commit.create_status(
context="Mergeable Check",
description=description,
@@ -111,7 +111,7 @@ def fail_mergeable_check(commit: Commit, description: str):
)

def reset_mergeable_check(commit: Commit, description: str = ""):
def reset_mergeable_check(commit: Commit, description: str = "") -> None:
commit.create_status(
context="Mergeable Check",
description=description,
@@ -120,7 +120,7 @@ def reset_mergeable_check(commit: Commit, description: str = ""):
)

def update_mergeable_check(gh: Github, pr_info: PRInfo, check_name: str):
def update_mergeable_check(gh: Github, pr_info: PRInfo, check_name: str) -> None:
if SKIP_MERGEABLE_CHECK_LABEL in pr_info.labels:
return
@@ -8,7 +8,7 @@ import shutil
import subprocess
import time
import sys
from typing import Dict, List, Optional, Set, Tuple, Union
from typing import Any, Dict, List, Optional, Set, Tuple, Union

from github import Github

@@ -52,7 +52,7 @@ class DockerImage:
and self.only_amd64 == other.only_amd64
)

def __lt__(self, other) -> bool:
def __lt__(self, other: Any) -> bool:
if not isinstance(other, DockerImage):
return False
if self.parent and not other.parent:
@@ -270,7 +270,7 @@ def build_and_push_one_image(
def process_single_image(
image: DockerImage,
versions: List[str],
additional_cache,
additional_cache: str,
push: bool,
child: bool,
) -> List[Tuple[str, str, str]]:
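Annotating the right-hand side of __lt__ as Any (rather than DockerImage) is the usual way to keep dunder comparisons palatable to strict mypy while an isinstance check still narrows the type at runtime. A minimal sketch of the pattern, independent of this class:

    from typing import Any

    class Version:
        def __init__(self, n: int) -> None:
            self.n = n

        def __lt__(self, other: Any) -> bool:
            if not isinstance(other, Version):
                return False  # mirrors the behaviour in docker_images_check
            return self.n < other.n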
@@ -70,7 +70,7 @@ def parse_args() -> argparse.Namespace:

def load_images(path: str, suffix: str) -> Images:
with open(os.path.join(path, CHANGED_IMAGES.format(suffix)), "rb") as images:
return json.load(images)
return json.load(images)  # type: ignore

def strip_suffix(suffix: str, images: Images) -> Images:
@@ -6,11 +6,11 @@ import time
import subprocess
import logging

from typing import Optional
from typing import List, Optional

class DockerImage:
def __init__(self, name, version: Optional[str] = None):
def __init__(self, name: str, version: Optional[str] = None):
self.name = name
if version is None:
self.version = "latest"
@@ -22,8 +22,11 @@ class DockerImage:

def get_images_with_versions(
reports_path, required_image, pull=True, version: Optional[str] = None
):
reports_path: str,
required_images: List[str],
pull: bool = True,
version: Optional[str] = None,
) -> List[DockerImage]:
images_path = None
for root, _, files in os.walk(reports_path):
for f in files:
@@ -45,12 +48,13 @@ def get_images_with_versions(
images = {}

docker_images = []
for image_name in required_image:
for image_name in required_images:
docker_image = DockerImage(image_name, version)
if image_name in images:
docker_image.version = images[image_name]
docker_images.append(docker_image)

latest_error = Exception("predefined to avoid access before created")
if pull:
for docker_image in docker_images:
for i in range(10):
@@ -75,6 +79,8 @@ def get_images_with_versions(
return docker_images

def get_image_with_version(reports_path, image, pull=True, version=None):
def get_image_with_version(
reports_path: str, image: str, pull: bool = True, version: Optional[str] = None
) -> DockerImage:
logging.info("Looking for images file in %s", reports_path)
return get_images_with_versions(reports_path, [image], pull, version=version)[0]
@@ -43,55 +43,55 @@ class TestDockerImageCheck(unittest.TestCase):
"docker/test/stateless",
"clickhouse/stateless-test",
False,
"clickhouse/test-base",
"clickhouse/test-base",  # type: ignore
),
di.DockerImage(
"docker/test/integration/base",
"clickhouse/integration-test",
False,
"clickhouse/test-base",
"clickhouse/test-base",  # type: ignore
),
di.DockerImage(
"docker/test/fuzzer",
"clickhouse/fuzzer",
False,
"clickhouse/test-base",
"clickhouse/test-base",  # type: ignore
),
di.DockerImage(
"docker/test/keeper-jepsen",
"clickhouse/keeper-jepsen-test",
False,
"clickhouse/test-base",
"clickhouse/test-base",  # type: ignore
),
di.DockerImage(
"docker/docs/check",
"clickhouse/docs-check",
False,
"clickhouse/docs-builder",
"clickhouse/docs-builder",  # type: ignore
),
di.DockerImage(
"docker/docs/release",
"clickhouse/docs-release",
False,
"clickhouse/docs-builder",
"clickhouse/docs-builder",  # type: ignore
),
di.DockerImage(
"docker/test/stateful",
"clickhouse/stateful-test",
False,
"clickhouse/stateless-test",
"clickhouse/stateless-test",  # type: ignore
),
di.DockerImage(
"docker/test/unit",
"clickhouse/unit-test",
False,
"clickhouse/stateless-test",
"clickhouse/stateless-test",  # type: ignore
),
di.DockerImage(
"docker/test/stress",
"clickhouse/stress-test",
False,
"clickhouse/stateful-test",
"clickhouse/stateful-test",  # type: ignore
),
]
)
@@ -277,7 +277,7 @@ class TestDockerServer(unittest.TestCase):
ds.gen_tags(version, "auto")

@patch("docker_server.get_tagged_versions")
def test_auto_release_type(self, mock_tagged_versions: MagicMock):
def test_auto_release_type(self, mock_tagged_versions: MagicMock) -> None:
mock_tagged_versions.return_value = [
get_version_from_string("1.1.1.1"),
get_version_from_string("1.2.1.1"),
@@ -6,6 +6,7 @@ import os
import csv
import sys
import atexit
from typing import List, Tuple

from github import Github

@@ -50,8 +51,10 @@ def get_fasttest_cmd(
)

def process_results(result_folder):
test_results = []
def process_results(
result_folder: str,
) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
test_results = []  # type: List[Tuple[str, str]]
additional_files = []
# Just upload all files from result_folder.
# If task provides processed results, then it's responsible for content of
@@ -78,7 +81,7 @@ def process_results(result_folder):
results_path = os.path.join(result_folder, "test_results.tsv")
if os.path.exists(results_path):
with open(results_path, "r", encoding="utf-8") as results_file:
test_results = list(csv.reader(results_file, delimiter="\t"))
test_results = list(csv.reader(results_file, delimiter="\t"))  # type: ignore
if len(test_results) == 0:
return "error", "Empty test_results.tsv", test_results, additional_files

@@ -172,7 +175,7 @@ if __name__ == "__main__":
"test_log.txt" in test_output_files or "test_result.txt" in test_output_files
)
test_result_exists = "test_results.tsv" in test_output_files
test_results = []
test_results = []  # type: List[Tuple[str, str]]
if "submodule_log.txt" not in test_output_files:
description = "Cannot clone repository"
state = "failure"
@@ -5,27 +5,11 @@ from github import Github
from env_helper import GITHUB_RUN_URL
from pr_info import PRInfo
from get_robot_token import get_best_robot_token
from commit_status_helper import get_commit
from commit_status_helper import get_commit, get_commit_filtered_statuses

NAME = "Run Check"

def filter_statuses(statuses):
"""
Squash statuses to latest state
1. context="first", state="success", update_time=1
2. context="second", state="success", update_time=2
3. context="first", stat="failure", update_time=3
=========>
1. context="second", state="success"
2. context="first", stat="failure"
"""
filt = {}
for status in sorted(statuses, key=lambda x: x.updated_at):
filt[status.context] = status
return filt

if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)

@@ -34,8 +18,13 @@ if __name__ == "__main__":
commit = get_commit(gh, pr_info.sha)

url = GITHUB_RUN_URL
statuses = filter_statuses(list(commit.get_statuses()))
if NAME in statuses and statuses[NAME].state == "pending":
statuses = get_commit_filtered_statuses(commit)
pending_status = any(  # find NAME status in pending state
True
for status in statuses
if status.context == NAME and status.state == "pending"
)
if pending_status:
commit.create_status(
context=NAME,
description="All checks finished",
@@ -7,6 +7,7 @@ import os
import subprocess
import sys
import atexit
from typing import List, Tuple

from github import Github

@@ -122,8 +123,11 @@ def get_tests_to_run(pr_info):
return list(result)

def process_results(result_folder, server_log_path):
test_results = []
def process_results(
result_folder: str,
server_log_path: str,
) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
test_results = []  # type: List[Tuple[str, str]]
additional_files = []
# Just upload all files from result_folder.
# If task provides processed results, then it's responsible for content of result_folder.
@@ -166,7 +170,7 @@ def process_results(result_folder, server_log_path):
return "error", "Not found test_results.tsv", test_results, additional_files

with open(results_path, "r", encoding="utf-8") as results_file:
test_results = list(csv.reader(results_file, delimiter="\t"))
test_results = list(csv.reader(results_file, delimiter="\t"))  # type: ignore
if len(test_results) == 0:
return "error", "Empty test_results.tsv", test_results, additional_files

@@ -232,8 +236,8 @@ if __name__ == "__main__":
sys.exit(0)

if "RUN_BY_HASH_NUM" in os.environ:
run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM"))
run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL"))
run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM", "0"))
run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL", "0"))
check_name_with_group = (
check_name + f" [{run_by_hash_num + 1}/{run_by_hash_total}]"
)
@@ -3,7 +3,7 @@
import re
import logging

import requests
import requests  # type: ignore

CLICKHOUSE_TAGS_URL = "https://api.github.com/repos/ClickHouse/ClickHouse/tags"
VERSION_PATTERN = r"(v(?:\d+\.)?(?:\d+\.)?(?:\d+\.)?\d+-[a-zA-Z]*)"
@@ -1,8 +1,17 @@
#!/usr/bin/env python3
import logging
from dataclasses import dataclass

import boto3  # type: ignore
from github import Github  # type: ignore
from github import Github
from github.AuthenticatedUser import AuthenticatedUser

@dataclass
class Token:
user: AuthenticatedUser
value: str
rest: int

def get_parameter_from_ssm(name, decrypt=True, client=None):
@@ -19,7 +28,7 @@ def get_best_robot_token(token_prefix_env_name="github_robot_token_"):
]
)["Parameters"]
assert parameters
token = {"login": "", "value": "", "rest": 0}
token = None

for token_name in [p["Name"] for p in parameters]:
value = get_parameter_from_ssm(token_name, True, client)
@@ -29,12 +38,15 @@ def get_best_robot_token(token_prefix_env_name="github_robot_token_"):
user = gh.get_user()
rest, _ = gh.rate_limiting
logging.info("Get token with %s remaining requests", rest)
if token["rest"] < rest:
token = {"user": user, "value": value, "rest": rest}
if token is None:
token = Token(user, value, rest)
continue
if token.rest < rest:
token.user, token.value, token.rest = user, value, rest

assert token["value"]
assert token
logging.info(
"User %s with %s remaining requests is used", token["user"].login, token["rest"]
"User %s with %s remaining requests is used", token.user.login, token.rest
)

return token["value"]
return token.value
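Replacing the accumulator dict with a small dataclass lets mypy check field access (token.rest is an int, token.value a str), whereas token["rest"] on a heterogeneous dict is effectively untyped. A rough sketch of the selection loop, with the SSM and GitHub calls stubbed out as assumptions:

    from dataclasses import dataclass
    from typing import List, Optional, Tuple

    @dataclass
    class Token:
        user: str      # AuthenticatedUser in the real code
        value: str
        rest: int      # remaining API requests for this token

    def pick_best(candidates: List[Tuple[str, str, int]]) -> str:
        token = None  # type: Optional[Token]
        for user, value, rest in candidates:
            if token is None or token.rest < rest:
                token = Token(user, value, rest)  # keep the token with the most headroom
        assert token
        return token.value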
@@ -4,7 +4,7 @@ import logging
import os.path as p
import re
import subprocess
from typing import List, Optional
from typing import Any, List, Optional

logger = logging.getLogger(__name__)

@@ -21,19 +21,19 @@ TWEAK = 1

# Py 3.8 removeprefix and removesuffix
def removeprefix(string: str, prefix: str):
def removeprefix(string: str, prefix: str) -> str:
if string.startswith(prefix):
return string[len(prefix) :]  # noqa: ignore E203, false positive
return string

def removesuffix(string: str, suffix: str):
def removesuffix(string: str, suffix: str) -> str:
if string.endswith(suffix):
return string[: -len(suffix)]
return string

def commit(name: str):
def commit(name: str) -> str:
r = re.compile(SHA_REGEXP)
if not r.match(name):
raise argparse.ArgumentTypeError(
@@ -42,7 +42,7 @@ def commit(name: str):
return name

def release_branch(name: str):
def release_branch(name: str) -> str:
r = re.compile(RELEASE_BRANCH_REGEXP)
if not r.match(name):
raise argparse.ArgumentTypeError("release branch should be as 12.1")
@@ -55,20 +55,23 @@ class Runner:
def __init__(self, cwd: str = CWD):
self._cwd = cwd

def run(self, cmd: str, cwd: Optional[str] = None, **kwargs) -> str:
def run(self, cmd: str, cwd: Optional[str] = None, **kwargs: Any) -> str:
if cwd is None:
cwd = self.cwd
logger.debug("Running command: %s", cmd)
return subprocess.check_output(
cmd, shell=True, cwd=cwd, encoding="utf-8", **kwargs
).strip()
output = str(
subprocess.check_output(
cmd, shell=True, cwd=cwd, encoding="utf-8", **kwargs
).strip()
)
return output

@property
def cwd(self) -> str:
return self._cwd

@cwd.setter
def cwd(self, value: str):
def cwd(self, value: str) -> None:
# Set _cwd only once, then set it to readonly
if self._cwd != CWD:
return
@@ -139,7 +142,7 @@ class Git:
)

@staticmethod
def check_tag(value: str):
def check_tag(value: str) -> None:
if value == "":
return
if not Git._tag_pattern.match(value):
@@ -150,7 +153,7 @@ class Git:
return self._latest_tag

@latest_tag.setter
def latest_tag(self, value: str):
def latest_tag(self, value: str) -> None:
self.check_tag(value)
self._latest_tag = value

@@ -159,7 +162,7 @@ class Git:
return self._new_tag

@new_tag.setter
def new_tag(self, value: str):
def new_tag(self, value: str) -> None:
self.check_tag(value)
self._new_tag = value
@@ -8,11 +8,18 @@ from time import sleep
from typing import List, Optional, Tuple

import github
from github.GithubException import RateLimitExceededException
from github.Issue import Issue
from github.NamedUser import NamedUser
from github.PullRequest import PullRequest
from github.Repository import Repository

# explicit reimport
# pylint: disable=useless-import-alias
from github.GithubException import (
RateLimitExceededException as RateLimitExceededException,
)
from github.Issue import Issue as Issue
from github.NamedUser import NamedUser as NamedUser
from github.PullRequest import PullRequest as PullRequest
from github.Repository import Repository as Repository

# pylint: enable=useless-import-alias

CACHE_PATH = p.join(p.dirname(p.realpath(__file__)), "gh_cache")
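The "explicit reimport" block exists because of no_implicit_reexport = True in the new .mypy.ini: a name imported into a module is no longer part of its public interface unless it is re-exported explicitly, either with the import-as-same-name idiom used here or via __all__. Roughly (an illustration, not the PR's code):

    # github_helper-style module
    from github.Issue import Issue as Issue   # re-exported: other tests/ci modules may import Issue from here
    # from github.Issue import Issue          # under no_implicit_reexport this stays module-private

so a statement like "from github_helper import Issue" elsewhere in tests/ci keeps type-checking.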
@@ -90,7 +97,7 @@ class GitHub(github.Github):
raise exception

# pylint: enable=signature-differs
def get_pulls_from_search(self, *args, **kwargs) -> PullRequests:
def get_pulls_from_search(self, *args, **kwargs) -> PullRequests:  # type: ignore
"""The search api returns actually issues, so we need to fetch PullRequests"""
issues = self.search_issues(*args, **kwargs)
repos = {}
@@ -168,7 +175,7 @@ class GitHub(github.Github):
self.dump(user, prfd)  # type: ignore
return user

def _get_cached(self, path: Path):
def _get_cached(self, path: Path):  # type: ignore
with open(path, "rb") as ob_fd:
return self.load(ob_fd)  # type: ignore

@@ -190,11 +197,11 @@ class GitHub(github.Github):
return False, cached_obj

@property
def cache_path(self):
def cache_path(self) -> Path:
return self._cache_path

@cache_path.setter
def cache_path(self, value: str):
def cache_path(self, value: str) -> None:
self._cache_path = Path(value)
if self._cache_path.exists():
assert self._cache_path.is_dir()
@@ -208,5 +215,6 @@ class GitHub(github.Github):
return self._retries

@retries.setter
def retries(self, value: int):
def retries(self, value: int) -> None:
assert isinstance(value, int)
self._retries = value
@@ -7,6 +7,7 @@ import logging
import os
import subprocess
import sys
from typing import List, Tuple

from github import Github

@@ -87,8 +88,10 @@ def get_env_for_runner(build_path, repo_path, result_path, work_path):
return my_env

def process_results(result_folder):
test_results = []
def process_results(
result_folder: str,
) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
test_results = []  # type: List[Tuple[str, str]]
additional_files = []
# Just upload all files from result_folder.
# If task provides processed results, then it's responsible for content of result_folder.
@@ -115,7 +118,7 @@ def process_results(result_folder):
results_path = os.path.join(result_folder, "test_results.tsv")
if os.path.exists(results_path):
with open(results_path, "r", encoding="utf-8") as results_file:
test_results = list(csv.reader(results_file, delimiter="\t"))
test_results = list(csv.reader(results_file, delimiter="\t"))  # type: ignore
if len(test_results) == 0:
return "error", "Empty test_results.tsv", test_results, additional_files

@@ -153,8 +156,8 @@ if __name__ == "__main__":
validate_bugix_check = args.validate_bugfix

if "RUN_BY_HASH_NUM" in os.environ:
run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM"))
run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL"))
run_by_hash_num = int(os.getenv("RUN_BY_HASH_NUM", "0"))
run_by_hash_total = int(os.getenv("RUN_BY_HASH_TOTAL", "0"))
check_name_with_group = (
check_name + f" [{run_by_hash_num + 1}/{run_by_hash_total}]"
)
@@ -7,9 +7,9 @@ import sys

import argparse

import boto3
import boto3  # type: ignore
import requests  # type: ignore
from github import Github
import requests

from env_helper import REPO_COPY, TEMP_PATH, S3_BUILDS_BUCKET, S3_DOWNLOAD
from stopwatch import Stopwatch
@@ -2,7 +2,7 @@
import json
import logging
import os
from typing import Set
from typing import Dict, Set, Union

from unidiff import PatchSet  # type: ignore

@@ -16,6 +16,7 @@ from env_helper import (

FORCE_TESTS_LABEL = "force tests"
SKIP_MERGEABLE_CHECK_LABEL = "skip mergeable check"
NeedsDataType = Dict[str, Dict[str, Union[str, Dict[str, str]]]]

DIFF_IN_DOCUMENTATION_EXT = [
".html",
@@ -146,7 +147,7 @@ class PRInfo:
self.body = github_event["pull_request"]["body"]
self.labels = {
label["name"] for label in github_event["pull_request"]["labels"]
}
}  # type: Set[str]

self.user_login = github_event["pull_request"]["user"]["login"]
self.user_orgs = set([])
@@ -178,7 +179,7 @@ class PRInfo:
if pull_request is None or pull_request["state"] == "closed":
# it's merged PR to master
self.number = 0
self.labels = {}
self.labels = set()
self.pr_html_url = f"{repo_prefix}/commits/{ref}"
self.base_ref = ref
self.base_name = self.repo_full_name
@@ -228,7 +229,7 @@ class PRInfo:
print(json.dumps(github_event, sort_keys=True, indent=4))
self.sha = os.getenv("GITHUB_SHA")
self.number = 0
self.labels = {}
self.labels = set()
repo_prefix = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}"
self.task_url = GITHUB_RUN_URL
self.commit_html_url = f"{repo_prefix}/commits/{self.sha}"
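The change from self.labels = {} to self.labels = set() is a genuine fix surfaced by the annotations: {} is an empty dict, not an empty set, so on those code paths the attribute silently had a different type than the Set[str] built in the main constructor branch:

    >>> type({})
    <class 'dict'>
    >>> type(set())
    <class 'set'>

With the "# type: Set[str]" annotation on the primary path, mypy now flags the dict literal on the other branches.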
@@ -5,7 +5,7 @@ import logging
import os
import re
from collections import namedtuple
from typing import Dict, List, Tuple
from typing import Dict, List, Optional, Tuple

from artifactory import ArtifactorySaaSPath  # type: ignore
from build_download_helper import download_build_with_progress
@@ -14,7 +14,7 @@ from git_helper import TAG_REGEXP, commit, removeprefix, removesuffix

# Necessary ENV variables
def getenv(name: str, default: str = None):
def getenv(name: str, default: Optional[str] = None) -> str:
env = os.getenv(name, default)
if env is not None:
return env
@@ -62,7 +62,7 @@ class Packages:
raise ValueError(f"{deb_pkg} not in {self.deb}")
return removesuffix(deb_pkg, ".deb").split("_")[-1]

def replace_with_fallback(self, name: str):
def replace_with_fallback(self, name: str) -> None:
if name.endswith(".deb"):
suffix = self.deb.pop(name)
self.deb[self.fallback_to_all(name)] = self.fallback_to_all(suffix)
@@ -80,7 +80,7 @@ class Packages:
return os.path.join(TEMP_PATH, package_file)

@staticmethod
def fallback_to_all(url_or_name: str):
def fallback_to_all(url_or_name: str) -> str:
"""Until July 2022 we had clickhouse-server and clickhouse-client with
arch 'all'"""
# deb
@@ -111,7 +111,7 @@ class S3:
self.force_download = force_download
self.packages = Packages(version)

def download_package(self, package_file: str, s3_path_suffix: str):
def download_package(self, package_file: str, s3_path_suffix: str) -> None:
path = Packages.path(package_file)
fallback_path = Packages.fallback_to_all(path)
if not self.force_download and (
@@ -186,7 +186,12 @@ class Release:

class Artifactory:
def __init__(
self, url: str, release: str, deb_repo="deb", rpm_repo="rpm", tgz_repo="tgz"
self,
url: str,
release: str,
deb_repo: str = "deb",
rpm_repo: str = "rpm",
tgz_repo: str = "tgz",
):
self._url = url
self._release = release
@@ -196,7 +201,7 @@ class Artifactory:
# check the credentials ENVs for early exit
self.__path_helper("_deb", "")

def deploy_deb(self, packages: Packages):
def deploy_deb(self, packages: Packages) -> None:
for package_file in packages.deb:
path = packages.path(package_file)
dist = self._release
@@ -212,13 +217,13 @@ class Artifactory:
)
self.deb_path(package_file).deploy_deb(path, dist, comp, arch)

def deploy_rpm(self, packages: Packages):
def deploy_rpm(self, packages: Packages) -> None:
for package_file in packages.rpm:
path = packages.path(package_file)
logging.info("Deploy %s to artifactory", path)
self.rpm_path(package_file).deploy_file(path)

def deploy_tgz(self, packages: Packages):
def deploy_tgz(self, packages: Packages) -> None:
for package_file in packages.tgz:
path = packages.path(package_file)
logging.info("Deploy %s to artifactory", path)
@@ -316,19 +321,19 @@ def parse_args() -> argparse.Namespace:
return args

def process_deb(s3: S3, art_clients: List[Artifactory]):
def process_deb(s3: S3, art_clients: List[Artifactory]) -> None:
s3.download_deb()
for art_client in art_clients:
art_client.deploy_deb(s3.packages)

def process_rpm(s3: S3, art_clients: List[Artifactory]):
def process_rpm(s3: S3, art_clients: List[Artifactory]) -> None:
s3.download_rpm()
for art_client in art_clients:
art_client.deploy_rpm(s3.packages)

def process_tgz(s3: S3, art_clients: List[Artifactory]):
def process_tgz(s3: S3, art_clients: List[Artifactory]) -> None:
s3.download_tgz()
for art_client in art_clients:
art_client.deploy_tgz(s3.packages)
@@ -11,7 +11,7 @@ On another hand, PyGithub is used for convenient getting commit's status from AP

from contextlib import contextmanager
from typing import List, Optional
from typing import Any, Iterator, List, Literal, Optional
import argparse
import logging
import subprocess
@@ -48,7 +48,7 @@ class Repo:
return self._url

@url.setter
def url(self, protocol: str):
def url(self, protocol: str) -> None:
if protocol == "ssh":
self._url = f"git@github.com:{self}.git"
elif protocol == "https":
@@ -68,17 +68,23 @@ class Release:
CMAKE_PATH = get_abs_path(FILE_WITH_VERSION_PATH)
CONTRIBUTORS_PATH = get_abs_path(GENERATED_CONTRIBUTORS)

def __init__(self, repo: Repo, release_commit: str, release_type: str):
def __init__(
self,
repo: Repo,
release_commit: str,
release_type: Literal["major", "minor", "patch"],
):
self.repo = repo
self._release_commit = ""
self.release_commit = release_commit
assert release_type in self.BIG + self.SMALL
self.release_type = release_type
self._git = git
self._version = get_version_from_repo(git=self._git)
self._release_branch = ""
self._rollback_stack = []  # type: List[str]

def run(self, cmd: str, cwd: Optional[str] = None, **kwargs) -> str:
def run(self, cmd: str, cwd: Optional[str] = None, **kwargs: Any) -> str:
cwd_text = ""
if cwd:
cwd_text = f" (CWD='{cwd}')"
@@ -153,7 +159,9 @@ class Release:

self.check_commit_release_ready()

def do(self, check_dirty: bool, check_branch: bool, with_release_branch: bool):
def do(
self, check_dirty: bool, check_branch: bool, with_release_branch: bool
) -> None:
self.check_prerequisites()

if check_dirty:
@@ -310,7 +318,7 @@ class Release:
return self._version

@version.setter
def version(self, version: ClickHouseVersion):
def version(self, version: ClickHouseVersion) -> None:
if not isinstance(version, ClickHouseVersion):
raise ValueError(f"version must be ClickHouseVersion, not {type(version)}")
self._version = version
@@ -320,7 +328,7 @@ class Release:
return self._release_branch

@release_branch.setter
def release_branch(self, branch: str):
def release_branch(self, branch: str) -> None:
self._release_branch = release_branch(branch)

@property
@@ -328,7 +336,7 @@ class Release:
return self._release_commit

@release_commit.setter
def release_commit(self, release_commit: str):
def release_commit(self, release_commit: str) -> None:
self._release_commit = commit(release_commit)

@contextmanager
@@ -367,7 +375,7 @@ class Release:
yield

@contextmanager
def _bump_testing_version(self, helper_branch: str):
def _bump_testing_version(self, helper_branch: str) -> Iterator[None]:
self.read_version()
self.version = self.version.update(self.release_type)
self.version.with_description(VersionType.TESTING)
@@ -387,7 +395,7 @@ class Release:
yield

@contextmanager
def _checkout(self, ref: str, with_checkout_back: bool = False):
def _checkout(self, ref: str, with_checkout_back: bool = False) -> Iterator[None]:
orig_ref = self._git.branch or self._git.sha
need_rollback = False
if ref not in (self._git.branch, self._git.sha):
@@ -406,7 +414,7 @@ class Release:
self.run(rollback_cmd)

@contextmanager
def _create_branch(self, name: str, start_point: str = ""):
def _create_branch(self, name: str, start_point: str = "") -> Iterator[None]:
self.run(f"git branch {name} {start_point}")
rollback_cmd = f"git branch -D {name}"
self._rollback_stack.append(rollback_cmd)
@@ -418,7 +426,7 @@ class Release:
raise

@contextmanager
def _create_gh_label(self, label: str, color_hex: str):
def _create_gh_label(self, label: str, color_hex: str) -> Iterator[None]:
# API call, https://docs.github.com/en/rest/reference/issues#create-a-label
self.run(
f"gh api repos/{self.repo}/labels -f name={label} -f color={color_hex}"
@@ -433,7 +441,7 @@ class Release:
raise

@contextmanager
def _create_gh_release(self, as_prerelease: bool):
def _create_gh_release(self, as_prerelease: bool) -> Iterator[None]:
with self._create_tag():
# Preserve tag if version is changed
tag = self.version.describe
@@ -468,7 +476,9 @@ class Release:
raise

@contextmanager
def _push(self, ref: str, with_rollback_on_fail: bool = True, remote_ref: str = ""):
def _push(
self, ref: str, with_rollback_on_fail: bool = True, remote_ref: str = ""
) -> Iterator[None]:
if remote_ref == "":
remote_ref = ref
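Typing release_type as Literal["major", "minor", "patch"] moves part of the runtime assert (release_type in self.BIG + self.SMALL) into the type checker: callers passing any other string constant are rejected by mypy, while the assert still guards dynamically built values. A minimal sketch of the idea, independent of this class:

    from typing import Literal

    ReleaseType = Literal["major", "minor", "patch"]

    def bump(version: str, release_type: ReleaseType) -> str:
        major, minor, patch = (int(x) for x in version.split("."))
        if release_type == "major":
            return f"{major + 1}.0.0"
        if release_type == "minor":
            return f"{major}.{minor + 1}.0"
        return f"{major}.{minor}.{patch + 1}"

    bump("22.8.1", "patch")      # accepted
    # bump("22.8.1", "hotfix")   # rejected by mypy: not a valid Literal value

Similarly, the @contextmanager generators gain -> Iterator[None], the return type mypy expects for a generator function wrapped by contextmanager.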
@@ -101,7 +101,7 @@ def _format_header(header, branch_name, branch_url=None):
result = "ClickHouse " + result
result += " for "
if branch_url:
result += '<a href="{url}">{name}</a>'.format(url=branch_url, name=branch_name)
result += f'<a href="{branch_url}">{branch_name}</a>'
else:
result += branch_name
return result
@@ -140,9 +140,7 @@ def _get_html_url(url):
if isinstance(url, tuple):
href, name = url[0], _get_html_url_name(url)
if href and name:
return '<a href="{href}">{name}</a>'.format(
href=href, name=_get_html_url_name(url)
)
return f'<a href="{href}">{_get_html_url_name(url)}</a>'
return ""

@@ -199,13 +197,7 @@ def create_test_html_report(
num_fails = num_fails + 1
is_fail_id = 'id="fail' + str(num_fails) + '" '

row += (
"<td "
+ is_fail_id
+ 'style="{}">'.format(style)
+ test_status
+ "</td>"
)
row += f'<td {is_fail_id}style="{style}">{test_status}</td>'

if test_time is not None:
row += "<td>" + test_time + "</td>"
@@ -229,8 +221,8 @@ def create_test_html_report(
if has_test_logs and not with_raw_logs:
headers.append("Logs")

headers = "".join(["<th>" + h + "</th>" for h in headers])
test_part = HTML_TEST_PART.format(headers=headers, rows=rows_part)
headers_html = "".join(["<th>" + h + "</th>" for h in headers])
test_part = HTML_TEST_PART.format(headers=headers_html, rows=rows_part)
else:
test_part = ""

@@ -317,33 +309,33 @@ def create_build_html_report(
build_results, build_logs_urls, artifact_urls_list
):
row = "<tr>"
row += "<td>{}</td>".format(build_result.compiler)
row += f"<td>{build_result.compiler}</td>"
if build_result.build_type:
row += "<td>{}</td>".format(build_result.build_type)
row += f"<td>{build_result.build_type}</td>"
else:
row += "<td>{}</td>".format("relwithdebuginfo")
row += "<td>relwithdebuginfo</td>"
if build_result.sanitizer:
row += "<td>{}</td>".format(build_result.sanitizer)
row += f"<td>{build_result.sanitizer}</td>"
else:
row += "<td>{}</td>".format("none")
row += "<td>none</td>"

row += "<td>{}</td>".format(build_result.libraries)
row += f"<td>{build_result.libraries}</td>"

if build_result.status:
style = _get_status_style(build_result.status)
row += '<td style="{}">{}</td>'.format(style, build_result.status)
row += f'<td style="{style}">{build_result.status}</td>'
else:
style = _get_status_style("error")
row += '<td style="{}">{}</td>'.format(style, "error")
row += f'<td style="{style}">error</td>'

row += '<td><a href="{}">link</a></td>'.format(build_log_url)
row += f'<td><a href="{build_log_url}">link</a></td>'

if build_result.elapsed_seconds:
delta = datetime.timedelta(seconds=build_result.elapsed_seconds)
else:
delta = "unknown"
delta = "unknown"  # type: ignore

row += "<td>{}</td>".format(str(delta))
row += f"<td>{delta}</td>"

links = ""
link_separator = "<br/>"
@@ -355,7 +347,7 @@ def create_build_html_report(
links += link_separator
if links:
links = links[: -len(link_separator)]
row += "<td>{}</td>".format(links)
row += f"<td>{links}</td>"

row += "</tr>"
rows += row
@ -112,7 +112,7 @@ def should_run_checks_for_pr(pr_info: PRInfo) -> Tuple[bool, str, str]:
|
||||
return True, "No special conditions apply", "pending"
|
||||
|
||||
|
||||
def check_pr_description(pr_info) -> Tuple[str, str]:
|
||||
def check_pr_description(pr_info: PRInfo) -> Tuple[str, str]:
|
||||
lines = list(
|
||||
map(lambda x: x.strip(), pr_info.body.split("\n") if pr_info.body else [])
|
||||
)
|
||||
|
0  tests/ci/runner_token_rotation_lambda/__init__.py  Normal file

@ -46,7 +46,7 @@ class S3Helper:
self.host = host
self.download_host = download_host
def _upload_file_to_s3(self, bucket_name, file_path, s3_path):
def _upload_file_to_s3(self, bucket_name: str, file_path: str, s3_path: str) -> str:
logging.debug(
"Start uploading %s to bucket=%s path=%s", file_path, bucket_name, s3_path
)
@ -110,7 +110,7 @@ class S3Helper:
url = f"{self.download_host}/{bucket_name}/{s3_path}"
return url.replace("+", "%2B").replace(" ", "%20")
def upload_test_report_to_s3(self, file_path, s3_path):
def upload_test_report_to_s3(self, file_path: str, s3_path: str) -> str:
if CI:
return self._upload_file_to_s3(S3_TEST_REPORTS_BUCKET, file_path, s3_path)
else:
@ -296,7 +296,7 @@ class S3Helper:
return False
@staticmethod
def copy_file_to_local(bucket_name, file_path, s3_path):
def copy_file_to_local(bucket_name: str, file_path: str, s3_path: str) -> str:
local_path = os.path.abspath(
os.path.join(RUNNER_TEMP, "s3", bucket_name, s3_path)
)

@ -4,6 +4,7 @@ import logging
import subprocess
import os
import sys
from typing import List, Tuple
from github import Github
@ -137,7 +138,7 @@ if __name__ == "__main__":
report_url = GITHUB_RUN_URL
status = "success"
test_results = []
test_results = [] # type: List[Tuple[str, str]]
# Try to get status message saved by the SQLancer
try:
# with open(
@ -145,7 +146,7 @@ if __name__ == "__main__":
# ) as status_f:
# status = status_f.readline().rstrip("\n")
if os.path.exists(os.path.join(workspace_path, "server_crashed.log")):
test_results.append("Server crashed", "FAIL")
test_results.append(("Server crashed", "FAIL"))
with open(
os.path.join(workspace_path, "summary.tsv"), "r", encoding="utf-8"
) as summary_f:

@ -5,6 +5,7 @@ import logging
import subprocess
import os
import sys
from typing import List, Tuple
from github import Github
@ -44,8 +45,10 @@ def get_run_command(
return cmd
def process_results(result_folder, server_log_path, run_log_path):
test_results = []
def process_results(
result_folder: str, server_log_path: str, run_log_path: str
) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
test_results = [] # type: List[Tuple[str, str]]
additional_files = []
# Just upload all files from result_folder.
# If task provides processed results, then it's responsible for content
@ -89,7 +92,7 @@ def process_results(result_folder, server_log_path, run_log_path):
results_path = os.path.join(result_folder, "test_results.tsv")
with open(results_path, "r", encoding="utf-8") as results_file:
test_results = list(csv.reader(results_file, delimiter="\t"))
test_results = list(csv.reader(results_file, delimiter="\t")) # type: ignore
if len(test_results) == 0:
raise Exception("Empty results")

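A note on the recurring # type: List[Tuple[str, str]] comments above: mypy cannot infer a useful element type for a bare [], so the containers are annotated at the point where they are created. A minimal sketch of the two equivalent spellings (illustrative only, not part of this commit):

    from typing import List, Tuple

    # type-comment form, as used in the diff (works on any Python 3 interpreter)
    test_results = []  # type: List[Tuple[str, str]]

    # variable-annotation form (PEP 526, Python 3.6+), equivalent for mypy
    annotated_results: List[Tuple[str, str]] = []

    test_results.append(("Server crashed", "FAIL"))  # element type is now checked
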
@ -1,11 +1,13 @@
#!/usr/bin/env python3
import argparse
import atexit
import csv
import logging
import os
import subprocess
import sys
import atexit
from typing import List, Tuple
from clickhouse_helper import (
@ -36,8 +38,10 @@ GIT_PREFIX = ( # All commits to remote are done as robot-clickhouse
)
def process_result(result_folder):
test_results = []
def process_result(
result_folder: str,
) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
test_results = [] # type: List[Tuple[str, str]]
additional_files = []
# Just upload all files from result_folder.
# If task provides processed results, then it's responsible
@ -64,7 +68,7 @@ def process_result(result_folder):
try:
results_path = os.path.join(result_folder, "test_results.tsv")
with open(results_path, "r", encoding="utf-8") as fd:
test_results = list(csv.reader(fd, delimiter="\t"))
test_results = list(csv.reader(fd, delimiter="\t")) # type: ignore
if len(test_results) == 0:
raise Exception("Empty results")
@ -88,7 +92,7 @@ def parse_args():
return parser.parse_args()
def checkout_head(pr_info: PRInfo):
def checkout_head(pr_info: PRInfo) -> None:
# It works ONLY for PRs, and only over ssh, so either
# ROBOT_CLICKHOUSE_SSH_KEY should be set or ssh-agent should work
assert pr_info.number
@ -108,7 +112,7 @@ def checkout_head(pr_info: PRInfo):
git_runner(f"git checkout -f head-{pr_info.head_ref}")
def commit_push_staged(pr_info: PRInfo):
def commit_push_staged(pr_info: PRInfo) -> None:
# It works ONLY for PRs, and only over ssh, so either
# ROBOT_CLICKHOUSE_SSH_KEY should be set or ssh-agent should work
assert pr_info.number

0  tests/ci/team_keys_lambda/__init__.py  Normal file

@ -14,7 +14,7 @@ import boto3 # type: ignore
class Keys(set):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.updated_at = 0
self.updated_at = 0.0
def update_now(self):
self.updated_at = datetime.now().timestamp()
@ -88,7 +88,7 @@ def get_token_from_aws() -> str:
)
get_secret_value_response = client.get_secret_value(SecretId=secret_name)
data = json.loads(get_secret_value_response["SecretString"])
return data["clickhouse_robot_token"]
return data["clickhouse_robot_token"] # type: ignore
def main(token: str, org: str, team_slug: str) -> str:

@ -3,6 +3,7 @@
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
from time import sleep
from typing import Optional
import logging
import os
import sys
@ -18,7 +19,7 @@ class TeePopen:
self.command = command
self.log_file = log_file
self.env = env
self.process = None
self._process = None # type: Optional[Popen]
self.timeout = timeout
def _check_timeout(self):
@ -51,7 +52,7 @@ class TeePopen:
return self
def __exit__(self, t, value, traceback):
for line in self.process.stdout:
for line in self.process.stdout: # type: ignore
sys.stdout.write(line)
self.log_file.write(line)
@ -59,8 +60,18 @@ class TeePopen:
self.log_file.close()
def wait(self):
for line in self.process.stdout:
for line in self.process.stdout: # type: ignore
sys.stdout.write(line)
self.log_file.write(line)
return self.process.wait()
@property
def process(self) -> Popen:
if self._process is not None:
return self._process
raise AttributeError("process is not created yet")
@process.setter
def process(self, process: Popen) -> None:
self._process = process

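The TeePopen change above swaps a plain self.process = None attribute for an Optional[Popen] backing field behind a property, so the attribute can start out unset while callers still see a non-optional Popen. A condensed sketch of the same pattern, using a hypothetical standalone class rather than the full TeePopen:

    from subprocess import Popen
    from typing import Optional

    class LazyProcess:
        def __init__(self) -> None:
            self._process = None  # type: Optional[Popen]

        @property
        def process(self) -> Popen:
            # callers get Popen, never Optional[Popen]
            if self._process is not None:
                return self._process
            raise AttributeError("process is not created yet")

        @process.setter
        def process(self, process: Popen) -> None:
            self._process = process
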
0  tests/ci/terminate_runner_lambda/__init__.py  Normal file

@ -1,17 +1,18 @@
#!/usr/bin/env python3
import requests
import argparse
import jwt
import sys
import json
import time
from collections import namedtuple
from typing import Any, Dict, List, Tuple
import boto3 # type: ignore
import requests # type: ignore
import jwt
def get_key_and_app_from_aws():
import boto3
def get_key_and_app_from_aws() -> Tuple[str, int]:
secret_name = "clickhouse_github_secret_key"
session = boto3.session.Session()
client = session.client(
@ -22,7 +23,7 @@ def get_key_and_app_from_aws():
return data["clickhouse-app-key"], int(data["clickhouse-app-id"])
def get_installation_id(jwt_token):
def get_installation_id(jwt_token: str) -> int:
headers = {
"Authorization": f"Bearer {jwt_token}",
"Accept": "application/vnd.github.v3+json",
@ -33,10 +34,12 @@ def get_installation_id(jwt_token):
for installation in data:
if installation["account"]["login"] == "ClickHouse":
installation_id = installation["id"]
return installation_id
break
return installation_id # type: ignore
def get_access_token(jwt_token, installation_id):
def get_access_token(jwt_token: str, installation_id: int) -> str:
headers = {
"Authorization": f"Bearer {jwt_token}",
"Accept": "application/vnd.github.v3+json",
@ -47,15 +50,16 @@ def get_access_token(jwt_token, installation_id):
)
response.raise_for_status()
data = response.json()
return data["token"]
return data["token"] # type: ignore
RunnerDescription = namedtuple(
"RunnerDescription", ["id", "name", "tags", "offline", "busy"]
)
RunnerDescriptions = List[RunnerDescription]
def list_runners(access_token):
def list_runners(access_token: str) -> RunnerDescriptions:
headers = {
"Authorization": f"token {access_token}",
"Accept": "application/vnd.github.v3+json",
@ -94,9 +98,9 @@ def list_runners(access_token):
return result
def how_many_instances_to_kill(event_data):
def how_many_instances_to_kill(event_data: dict) -> Dict[str, int]:
data_array = event_data["CapacityToTerminate"]
to_kill_by_zone = {}
to_kill_by_zone = {} # type: Dict[str, int]
for av_zone in data_array:
zone_name = av_zone["AvailabilityZone"]
to_kill = av_zone["Capacity"]
@ -104,15 +108,16 @@ def how_many_instances_to_kill(event_data):
to_kill_by_zone[zone_name] = 0
to_kill_by_zone[zone_name] += to_kill
return to_kill_by_zone
def get_candidates_to_be_killed(event_data):
def get_candidates_to_be_killed(event_data: dict) -> Dict[str, List[str]]:
data_array = event_data["Instances"]
instances_by_zone = {}
instances_by_zone = {} # type: Dict[str, List[str]]
for instance in data_array:
zone_name = instance["AvailabilityZone"]
instance_id = instance["InstanceId"]
instance_id = instance["InstanceId"] # type: str
if zone_name not in instances_by_zone:
instances_by_zone[zone_name] = []
instances_by_zone[zone_name].append(instance_id)
@ -120,7 +125,7 @@ def get_candidates_to_be_killed(event_data):
return instances_by_zone
def delete_runner(access_token, runner):
def delete_runner(access_token: str, runner: RunnerDescription) -> bool:
headers = {
"Authorization": f"token {access_token}",
"Accept": "application/vnd.github.v3+json",
@ -134,10 +139,12 @@ def delete_runner(access_token, runner):
print(
f"Response code deleting {runner.name} with id {runner.id} is {response.status_code}"
)
return response.status_code == 204
return bool(response.status_code == 204)
def main(github_secret_key, github_app_id, event):
def main(
github_secret_key: str, github_app_id: int, event: dict
) -> Dict[str, List[str]]:
print("Got event", json.dumps(event, sort_keys=True, indent=4))
to_kill_by_zone = how_many_instances_to_kill(event)
instances_by_zone = get_candidates_to_be_killed(event)
@ -156,17 +163,16 @@ def main(github_secret_key, github_app_id, event):
to_delete_runners = []
instances_to_kill = []
for zone in to_kill_by_zone:
num_to_kill = to_kill_by_zone[zone]
for zone, num_to_kill in to_kill_by_zone.items():
candidates = instances_by_zone[zone]
if num_to_kill > len(candidates):
raise Exception(
f"Required to kill {num_to_kill}, but have only {len(candidates)} candidates in AV {zone}"
)
delete_for_av = []
delete_for_av = [] # type: RunnerDescriptions
for candidate in candidates:
if candidate not in set([runner.name for runner in runners]):
if candidate not in set(runner.name for runner in runners):
print(
f"Candidate {candidate} was not in runners list, simply delete it"
)
@ -214,7 +220,7 @@ def main(github_secret_key, github_app_id, event):
return response
def handler(event, context):
def handler(event: dict, context: Any) -> Dict[str, List[str]]:
private_key, app_id = get_key_and_app_from_aws()
return main(private_key, app_id, event)

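Several of the lambda helpers above add # type: ignore on return statements or wrap a comparison in bool(...). The boto3 and requests modules are imported without type stubs here, so values read from them are typed as Any, and a strict mypy setup that warns about returning Any flags a plain return of such a value from a function annotated to return str or bool. A small sketch of both workarounds, assuming an untyped payload (illustrative, not part of this commit):

    from typing import Any, Dict

    def token_from(payload: Dict[str, Any]) -> str:
        # the value is Any; silence the return-Any warning explicitly
        return payload["token"]  # type: ignore

    def succeeded(status_code: Any) -> bool:
        # comparing Any with an int yields Any for the checker; bool() pins the type
        return bool(status_code == 204)
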
@ -5,6 +5,7 @@ import os
import sys
import subprocess
import atexit
from typing import List, Tuple
from github import Github
@ -37,14 +38,16 @@ def get_test_name(line):
raise Exception(f"No test name in line '{line}'")
def process_result(result_folder):
def process_results(
result_folder: str,
) -> Tuple[str, str, List[Tuple[str, str]], List[str]]:
OK_SIGN = "OK ]"
FAILED_SIGN = "FAILED ]"
SEGFAULT = "Segmentation fault"
SIGNAL = "received signal SIG"
PASSED = "PASSED"
summary = []
summary = [] # type: List[Tuple[str, str]]
total_counter = 0
failed_counter = 0
result_log_path = f"{result_folder}/test_result.txt"
@ -151,7 +154,7 @@ if __name__ == "__main__":
subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
s3_helper = S3Helper()
state, description, test_results, additional_logs = process_result(test_output)
state, description, test_results, additional_logs = process_results(test_output)
ch_helper = ClickHouseHelper()
mark_flaky_tests(ch_helper, check_name, test_results)

@ -1,6 +1,7 @@
import os
import logging
import ast
from typing import List, Tuple
from env_helper import (
GITHUB_JOB_URL,
@ -9,35 +10,15 @@ from env_helper import (
GITHUB_SERVER_URL,
)
from report import ReportColorTheme, create_test_html_report
from s3_helper import S3Helper
def process_logs(
s3_client, additional_logs, s3_path_prefix, test_results, with_raw_logs
):
s3_client: S3Helper, additional_logs: List[str], s3_path_prefix: str
) -> List[str]:
logging.info("Upload files to s3 %s", additional_logs)
processed_logs = {}
# Firstly convert paths of logs from test_results to urls to s3.
for test_result in test_results:
if len(test_result) <= 3 or with_raw_logs:
continue
# Convert from string repr of list to list.
test_log_paths = ast.literal_eval(test_result[3])
test_log_urls = []
for log_path in test_log_paths:
if log_path in processed_logs:
test_log_urls.append(processed_logs[log_path])
elif log_path:
url = s3_client.upload_test_report_to_s3(
log_path, s3_path_prefix + "/" + os.path.basename(log_path)
)
test_log_urls.append(url)
processed_logs[log_path] = url
test_result[3] = test_log_urls
additional_urls = []
additional_urls = [] # type: List[str]
for log_path in additional_logs:
if log_path:
additional_urls.append(
@ -50,21 +31,18 @@ def process_logs(
def upload_results(
s3_client,
pr_number,
commit_sha,
test_results,
additional_files,
check_name,
with_raw_logs=True,
statuscolors=None,
):
s3_client: S3Helper,
pr_number: int,
commit_sha: str,
test_results: List[Tuple[str, str]],
additional_files: List[str],
check_name: str,
with_raw_logs: bool = True,
) -> str:
s3_path_prefix = f"{pr_number}/{commit_sha}/" + check_name.lower().replace(
" ", "_"
).replace("(", "_").replace(")", "_").replace(",", "_")
additional_urls = process_logs(
s3_client, additional_files, s3_path_prefix, test_results, with_raw_logs
)
additional_urls = process_logs(s3_client, additional_files, s3_path_prefix)
branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
branch_name = "master"

@ -2,9 +2,9 @@
import logging
import os.path as p
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, ArgumentTypeError
from typing import Dict, List, Optional, Tuple, Union
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
from git_helper import TWEAK, Git, get_tags, git_runner, removeprefix
from git_helper import TWEAK, Git as Git, get_tags, git_runner, removeprefix
FILE_WITH_VERSION_PATH = "cmake/autogenerated_versions.txt"
CHANGELOG_IN_PATH = "debian/changelog.in"
@ -45,7 +45,7 @@ class ClickHouseVersion:
patch: Union[int, str],
revision: Union[int, str],
git: Optional[Git],
tweak: str = None,
tweak: Optional[str] = None,
):
self._major = int(major)
self._minor = int(minor)
@ -59,10 +59,15 @@ class ClickHouseVersion:
self._tweak = self._git.tweak
self._describe = ""
def update(self, part: str) -> "ClickHouseVersion":
def update(self, part: Literal["major", "minor", "patch"]) -> "ClickHouseVersion":
"""If part is valid, returns a new version"""
method = getattr(self, f"{part}_update")
return method()
if part == "major":
return self.major_update()
if part == "minor":
return self.minor_update()
if part == "patch":
return self.patch_update()
raise KeyError(f"wrong part {part} is used")
def major_update(self) -> "ClickHouseVersion":
if self._git is not None:
@ -139,10 +144,10 @@ class ClickHouseVersion:
raise ValueError(f"version type {version_type} not in {VersionType.VALID}")
self._describe = f"v{self.string}-{version_type}"
def __eq__(self, other) -> bool:
def __eq__(self, other: Any) -> bool:
if not isinstance(self, type(other)):
return NotImplemented
return (
return bool(
self.major == other.major
and self.minor == other.minor
and self.patch == other.patch
@ -170,7 +175,7 @@ class VersionType:
VALID = (TESTING, PRESTABLE, STABLE, LTS)
def validate_version(version: str):
def validate_version(version: str) -> None:
parts = version.split(".")
if len(parts) != 4:
raise ValueError(f"{version} does not contain 4 parts")
@ -259,7 +264,7 @@ def get_tagged_versions() -> List[ClickHouseVersion]:
def update_cmake_version(
version: ClickHouseVersion,
versions_path: str = FILE_WITH_VERSION_PATH,
):
) -> None:
path_to_file = get_abs_path(versions_path)
with open(path_to_file, "w", encoding="utf-8") as f:
f.write(VERSIONS_TEMPLATE.format_map(version.as_dict()))
@ -269,7 +274,7 @@ def update_contributors(
relative_contributors_path: str = GENERATED_CONTRIBUTORS,
force: bool = False,
raise_error: bool = False,
):
) -> None:
# Check if we have shallow checkout by comparing number of lines
# '--is-shallow-repository' is in git since 2.15, 2017-10-30
if git_runner.run("git rev-parse --is-shallow-repository") == "true" and not force:

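The update() rewrite above replaces a getattr-based dispatch with explicit branches and a Literal["major", "minor", "patch"] parameter: getattr with a computed name gives the checker an Any, while the Literal type lets mypy reject invalid arguments at the call site. A minimal sketch of that narrowing, as a standalone example rather than the real class:

    from typing import Literal

    Part = Literal["major", "minor", "patch"]

    def bump(version: str, part: Part) -> str:
        major, minor, patch = (int(x) for x in version.split("."))
        if part == "major":
            return f"{major + 1}.0.0"
        if part == "minor":
            return f"{major}.{minor + 1}.0"
        return f"{major}.{minor}.{patch + 1}"

    bump("22.11.1", "patch")    # accepted
    # bump("22.11.1", "tweak")  # rejected by mypy: "tweak" is not a valid Part
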
@ -17,9 +17,9 @@ class TestFunctions(unittest.TestCase):
("v1.1.1.2-testing", vh.get_version_from_string("1.1.1.2")),
("refs/tags/v1.1.1.2-testing", vh.get_version_from_string("1.1.1.2")),
)
for case in cases:
version = vh.version_arg(case[0])
self.assertEqual(case[1], version)
for test_case in cases:
version = vh.version_arg(test_case[0])
self.assertEqual(test_case[1], version)
error_cases = (
"0.0.0",
"1.1.1.a",
@ -28,6 +28,6 @@ class TestFunctions(unittest.TestCase):
"v1.1.1.2-testin",
"refs/tags/v1.1.1.2-testin",
)
for case in error_cases:
for error_case in error_cases:
with self.assertRaises(ArgumentTypeError):
version = vh.version_arg(case[0])
version = vh.version_arg(error_case[0])

@ -313,7 +313,7 @@ def check_suspicious_changed_files(changed_files):
return False
def approve_run(workflow_description: WorkflowDescription, token):
def approve_run(workflow_description: WorkflowDescription, token: str) -> None:
url = f"{workflow_description.api_url}/approve"
_exec_post_with_retry(url, token)
@ -391,7 +391,7 @@ def rerun_workflow(workflow_description, token):
def check_workflow_completed(
event_data, workflow_description: WorkflowDescription, token: str
event_data: dict, workflow_description: WorkflowDescription, token: str
) -> bool:
if workflow_description.action == "completed":
attempt = 0

23  utils/check-style/check-mypy  Executable file
@ -0,0 +1,23 @@
#!/usr/bin/env bash

# The mypy supports pyproject.toml, but unfortunately it doesn't support it recursively
# https://github.com/python/mypy/issues/10613
#
# Unless it's done, mypy only runs against tests/ci
# Let's leave here a room for improvement and redo it when mypy will test anything else

GIT_ROOT=$(git rev-parse --show-cdup)
GIT_ROOT=${GIT_ROOT:-.}
CONFIG="$GIT_ROOT/tests/ci/.mypy.ini"
DIRS=("$GIT_ROOT/tests/ci/" "$GIT_ROOT/tests/ci/"*/)
tmp=$(mktemp)
for dir in "${DIRS[@]}"; do
if ! compgen -G "$dir"/*.py > /dev/null; then
continue
fi
if ! mypy --config-file="$CONFIG" --sqlite-cache "$dir"/*.py > "$tmp" 2>&1; then
echo "Errors while processing $dir":
cat "$tmp"
fi
done
rm -rf "$tmp"