Automatic style fix

robot-clickhouse 2024-09-27 10:19:39 +00:00
parent bed8a9ae07
commit 7c6ce8611c
639 changed files with 1808 additions and 1297 deletions
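
The hunks below all make the same kind of change: module headers are reordered into the conventional Python import layout, standard-library modules first, then third-party packages, then project-local modules, alphabetized by module name within each group (the layout isort and similar formatters produce). A minimal sketch of that rule follows; it is illustrative only, the third-party list is a hand-picked assumption rather than real package detection, and the sample input is the unsorted header from the first hunk.

# A minimal sketch of the ordering rule this commit enforces (not the actual
# formatter): group imports as standard library / third-party / local and
# sort by module name inside each group.
import sys

# Assumption for the sketch; a real sorter detects installed packages.
THIRD_PARTY = {"boto3", "requests", "pytest", "jinja2", "docker"}

def module_of(line: str) -> str:
    # "import os" -> "os", "from ci_config import CI" -> "ci_config"
    return line.split()[1].split(".")[0]

def group_of(line: str) -> int:
    module = module_of(line)
    if module in sys.stdlib_module_names:  # available since Python 3.10
        return 0
    if module in THIRD_PARTY:
        return 1
    return 2  # project-local helpers such as ci_config or pr_info

def sort_imports(lines):
    return sorted(lines, key=lambda l: (group_of(l), module_of(l).lower(), l))

# The unsorted header from the first hunk becomes the sorted one:
print(sort_imports(["import subprocess", "import os", "import sys"]))
# -> ['import os', 'import subprocess', 'import sys']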

View File

@ -2,8 +2,8 @@
# To run this script you must install docker and pipdeptree python package
#
import subprocess
import os
import subprocess
import sys

View File

@ -1,9 +1,10 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
import os
import jinja2
import itertools
import os
from argparse import ArgumentParser
import jinja2
def removesuffix(text, suffix):

View File

@ -1,7 +1,8 @@
import datetime
import os
import subprocess
import datetime
from flask import Flask, flash, request, redirect, url_for
from flask import Flask, flash, redirect, request, url_for
def run_command(command, wait=False):

View File

@ -1,9 +1,9 @@
#!/usr/bin/env python3
import os
import logging
import argparse
import csv
import logging
import os
def process_result(result_folder):

View File

@ -1,12 +1,12 @@
#!/usr/bin/env python3
import os
import yaml
import html
import os
import random
import string
from clickhouse_driver import Client
import yaml
from clickhouse_driver import Client
client = Client(host="localhost", port=9000)
settings = {

View File

@ -1,8 +1,7 @@
#!/usr/bin/env python3
import sys
import string
import sys
TOKEN_TEXT = 1
TOKEN_VAR = 2

View File

@ -7,6 +7,7 @@ import sys
from pathlib import Path
from build_download_helper import read_build_urls
from ci_config import CI
from clickhouse_helper import CiLogsCredentials
from docker_images_helper import DockerImage, get_docker_image, pull_image
from env_helper import REPORT_PATH, TEMP_PATH
@ -14,7 +15,6 @@ from pr_info import PRInfo
from report import FAIL, FAILURE, OK, SUCCESS, JobReport, TestResult
from stopwatch import Stopwatch
from tee_popen import TeePopen
from ci_config import CI
IMAGE_NAME = "clickhouse/fuzzer"

View File

@ -34,11 +34,11 @@ from typing import List, Optional
import __main__
from ci_config import Labels
from env_helper import TEMP_PATH
from get_robot_token import get_best_robot_token
from git_helper import GIT_PREFIX, git_runner, is_shallow
from github_helper import GitHub, PullRequest, PullRequests, Repository
from ci_config import Labels
from ssh import SSHKey

View File

@ -1,15 +1,15 @@
import argparse
import json
import os
from typing import Union, Dict, List
from typing import Dict, List, Union
import boto3
import requests
from botocore.exceptions import ClientError
from pr_info import PRInfo
from ci_config import CI
from ci_utils import WithIter
from pr_info import PRInfo
class Channels(metaclass=WithIter):

View File

@ -1,7 +1,7 @@
import copy
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, Union, Iterable, Optional, Literal, Any
from typing import Any, Iterable, List, Literal, Optional, Union
from ci_utils import WithIter
from integration_test_images import IMAGES

View File

@ -1,18 +1,17 @@
from pathlib import Path
from typing import Optional
from ci_utils import GH
from env_helper import (
S3_BUILDS_BUCKET,
TEMP_PATH,
GITHUB_UPSTREAM_REPOSITORY,
GITHUB_REPOSITORY,
GITHUB_UPSTREAM_REPOSITORY,
S3_BUILDS_BUCKET,
S3_BUILDS_BUCKET_PUBLIC,
TEMP_PATH,
)
from s3_helper import S3Helper
from ci_utils import GH
from synchronizer_utils import SYNC_BRANCH_PREFIX
# pylint: disable=too-many-lines

View File

@ -10,15 +10,13 @@ from pathlib import Path
from typing import List, Tuple
from build_download_helper import download_all_deb_packages
from clickhouse_helper import (
CiLogsCredentials,
)
from docker_images_helper import get_docker_image, pull_image, DockerImage
from env_helper import TEMP_PATH, REPORT_PATH
from clickhouse_helper import CiLogsCredentials
from docker_images_helper import DockerImage, get_docker_image, pull_image
from env_helper import REPORT_PATH, TEMP_PATH
from pr_info import PRInfo
from report import ERROR, SUCCESS, JobReport, StatusType, TestResults
from stopwatch import Stopwatch
from tee_popen import TeePopen
from report import ERROR, SUCCESS, JobReport, StatusType, TestResults
def get_image_name() -> str:

View File

@ -2,8 +2,8 @@
import fileinput
import json
import logging
import time
import os
import time
from pathlib import Path
from typing import Any, Dict, List, Optional

View File

@ -1,11 +1,9 @@
#!/usr/bin/env python3
import subprocess
import logging
import subprocess
from pathlib import Path
from typing import Optional
PIGZ = Path("/usr/bin/pigz")
SUFFIX = ".zst"

View File

@ -5,21 +5,21 @@ from dataclasses import asdict
from hashlib import md5
from logging import getLogger
from pathlib import Path
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Union
from sys import modules
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Union
from docker_images_helper import get_images_info
from git_helper import Runner
from env_helper import ROOT_DIR
from ci_utils import cd
from ci_config import CI
from ci_utils import cd
from docker_images_helper import get_images_info
from env_helper import ROOT_DIR
from git_helper import Runner
DOCKER_DIGEST_LEN = 12
JOB_DIGEST_LEN = 10
if TYPE_CHECKING:
from hashlib import ( # pylint:disable=no-name-in-module,ungrouped-imports
_Hash as HASH,
from hashlib import (
_Hash as HASH, # pylint:disable=no-name-in-module,ungrouped-imports
)
else:
HASH = "_Hash"

View File

@ -10,9 +10,8 @@ from typing import Any, List
import boto3 # type: ignore
from build_download_helper import (
read_build_urls,
)
from build_download_helper import read_build_urls
from ci_config import CI
from compress_files import compress_fast
from env_helper import REPO_COPY, REPORT_PATH, TEMP_PATH
from get_robot_token import get_parameter_from_ssm
@ -21,7 +20,6 @@ from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults
from ssh import SSHKey
from stopwatch import Stopwatch
from tee_popen import TeePopen
from ci_config import CI
JEPSEN_GROUP_NAME = "jepsen_group"

View File

@ -14,19 +14,19 @@ from github.PaginatedList import PaginatedList
from github.PullRequestReview import PullRequestReview
from github.WorkflowRun import WorkflowRun
from ci_config import CI
from commit_status_helper import (
get_commit_filtered_statuses,
get_commit,
get_commit_filtered_statuses,
trigger_mergeable_check,
update_upstream_sync_status,
)
from env_helper import GITHUB_REPOSITORY, GITHUB_UPSTREAM_REPOSITORY
from get_robot_token import get_best_robot_token
from github_helper import GitHub, NamedUser, PullRequest, Repository
from pr_info import PRInfo
from report import SUCCESS
from env_helper import GITHUB_UPSTREAM_REPOSITORY, GITHUB_REPOSITORY
from synchronizer_utils import SYNC_BRANCH_PREFIX
from ci_config import CI
# The team name for accepted approvals
TEAM_NAME = getenv("GITHUB_TEAM_NAME", "core")

View File

@ -24,8 +24,8 @@ import subprocess
from contextlib import contextmanager
from typing import Any, Final, Iterator, List, Optional, Tuple
from git_helper import Git, commit, release_branch
from ci_config import Labels
from git_helper import Git, commit, release_branch
from report import SUCCESS
from version_helper import (
FILE_WITH_VERSION_PATH,

View File

@ -6,6 +6,7 @@ from typing import Tuple
from github import Github
from ci_config import CI
from commit_status_helper import (
create_ci_report,
format_description,
@ -16,11 +17,9 @@ from commit_status_helper import (
)
from env_helper import GITHUB_REPOSITORY, GITHUB_SERVER_URL
from get_robot_token import get_best_robot_token
from ci_config import CI
from pr_info import PRInfo
from report import FAILURE, PENDING, SUCCESS, StatusType
TRUSTED_ORG_IDS = {
54801242, # clickhouse
}

View File

@ -9,6 +9,7 @@ from typing import Any, List, Union
import boto3 # type: ignore
import botocore # type: ignore
from compress_files import compress_file_fast
from env_helper import (
IS_CI,

View File

@ -7,12 +7,12 @@ import sys
from pathlib import Path
from build_download_helper import read_build_urls
from ci_config import CI
from docker_images_helper import DockerImage, get_docker_image, pull_image
from env_helper import REPORT_PATH, TEMP_PATH
from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults
from stopwatch import Stopwatch
from tee_popen import TeePopen
from ci_config import CI
IMAGE_NAME = "clickhouse/sqlancer-test"

View File

@ -7,12 +7,12 @@ import sys
from pathlib import Path
from build_download_helper import read_build_urls
from ci_config import CI
from docker_images_helper import get_docker_image, pull_image
from env_helper import REPORT_PATH, TEMP_PATH
from pr_info import PRInfo
from report import SUCCESS, JobReport, TestResult
from stopwatch import Stopwatch
from ci_config import CI
IMAGE_NAME = "clickhouse/sqltest"

View File

@ -5,12 +5,12 @@
import argparse
import sys
from ci_config import CI
from commit_status_helper import get_commit, post_commit_status
from get_robot_token import get_best_robot_token
from github_helper import GitHub
from pr_info import PRInfo
from report import SUCCESS
from ci_config import CI
def parse_args() -> argparse.Namespace:

View File

@ -1,16 +1,17 @@
#!/usr/bin/env python
import shutil
import unittest
from hashlib import md5
from pathlib import Path
import shutil
from typing import Dict, Set
import unittest
from s3_helper import S3Helper
from ci_cache import CiCache
from digest_helper import JOB_DIGEST_LEN
from commit_status_helper import CommitStatusData
from env_helper import S3_BUILDS_BUCKET, TEMP_PATH
from ci_config import CI
from commit_status_helper import CommitStatusData
from digest_helper import JOB_DIGEST_LEN
from env_helper import S3_BUILDS_BUCKET, TEMP_PATH
from s3_helper import S3Helper
def _create_mock_digest_1(string):

View File

@ -1,16 +1,15 @@
#!/usr/bin/env python3
import copy
import unittest
import random
import unittest
from ci_config import CI
import ci as CIPY
from ci_settings import CiSettings
from pr_info import PRInfo, EventType
from s3_helper import S3Helper
from ci_cache import CiCache
from ci_config import CI
from ci_settings import CiSettings
from ci_utils import Utils
from pr_info import EventType, PRInfo
from s3_helper import S3Helper
_TEST_EVENT_JSON = {"dummy": "dummy"}

View File

@ -3,8 +3,9 @@
# type: ignore
import unittest
from ci_settings import CiSettings
from ci_config import CI
from ci_settings import CiSettings
_TEST_BODY_1 = """
#### Run only:

View File

@ -2,8 +2,8 @@
import unittest
from version_helper import get_version_from_string
import docker_server as ds
from version_helper import get_version_from_string
class TestDockerServer(unittest.TestCase):

View File

@ -1,9 +1,9 @@
#!/usr/bin/env python3
import os
import logging
import argparse
import csv
import logging
import os
OK_SIGN = "[ OK "
FAIL_SIGN = "[ FAIL "

View File

@ -3,8 +3,8 @@
import configparser
import logging
import os
from pathlib import Path
import subprocess
from pathlib import Path
DEBUGGER = os.getenv("DEBUGGER", "")
FUZZER_ARGS = os.getenv("FUZZER_ARGS", "")

View File

@ -6,7 +6,8 @@ import logging
import os
import pytest # pylint:disable=import-error; for style check
from helpers.cluster import run_and_check, is_port_free
from helpers.cluster import is_port_free, run_and_check
from helpers.network import _NetworkManager
# This is a workaround for a problem with logging in pytest [1].
@ -22,8 +23,8 @@ def pdb_history(request):
Fixture loads and saves pdb history to file, so it can be preserved between runs
"""
if request.config.getoption("--pdb"):
import readline # pylint:disable=import-outside-toplevel
import pdb # pylint:disable=import-outside-toplevel
import readline # pylint:disable=import-outside-toplevel
def save_history():
readline.write_history_file(".pdb_history")

View File

@ -1,7 +1,7 @@
import logging
import os
import subprocess as sp
import tempfile
import logging
from threading import Timer
DEFAULT_QUERY_TIMEOUT = 600

View File

@ -1,63 +1,64 @@
import base64
import errno
from functools import cache
import http.client
import logging
import os
import platform
import stat
import os.path as p
import platform
import pprint
import pwd
import re
import shlex
import shutil
import socket
import stat
import subprocess
import time
import traceback
import urllib.parse
import shlex
import urllib3
import requests
from functools import cache
from pathlib import Path
import requests
import urllib3
try:
# Please, add modules that required for specific tests only here.
# So contributors will be able to run most tests locally
# without installing tons of unneeded packages that may be not so easy to install.
import asyncio
from cassandra.policies import RoundRobinPolicy
import ssl
import cassandra.cluster
import nats
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
import pymongo
import pymysql
import nats
import ssl
from cassandra.policies import RoundRobinPolicy
from confluent_kafka.avro.cached_schema_registry_client import (
CachedSchemaRegistryClient,
)
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from .hdfs_api import HDFSApi # imports requests_kerberos
except Exception as e:
logging.warning(f"Cannot import some modules, some tests may not work: {e}")
import docker
from dict2xml import dict2xml
from kazoo.client import KazooClient
from kazoo.exceptions import KazooException
from minio import Minio
from helpers.test_tools import assert_eq_with_retry, exec_query_with_retry
from helpers import pytest_xdist_logging_to_separate_files
from helpers.client import QueryRuntimeException
import docker
from helpers.test_tools import assert_eq_with_retry, exec_query_with_retry
from .client import Client
from .config_cluster import *
from .random_settings import write_random_settings_config
from .retry_decorator import retry
from .config_cluster import *
HELPERS_DIR = p.dirname(__file__)
CLICKHOUSE_ROOT_DIR = p.join(p.dirname(__file__), "../../..")
LOCAL_DOCKER_COMPOSE_DIR = p.join(CLICKHOUSE_ROOT_DIR, "tests/integration/compose/")

View File

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import datetime
import logging
import os
import uuid
import warnings
@ -8,7 +9,6 @@ import cassandra.cluster
import pymongo
import pymysql.cursors
import redis
import logging
class ExternalSource(object):

View File

@ -1,15 +1,16 @@
# -*- coding: utf-8 -*-
import io
import gzip
import subprocess
import time
from tempfile import NamedTemporaryFile
import requests
import requests_kerberos as reqkerb
import socket
import tempfile
import io
import logging
import os
import socket
import subprocess
import tempfile
import time
from tempfile import NamedTemporaryFile
import requests
import requests_kerberos as reqkerb
class mk_krb_conf(object):

View File

@ -1,13 +1,15 @@
import contextlib
import io
import subprocess
import select
import socket
import subprocess
import time
import typing as tp
import contextlib
import select
from kazoo.client import KazooClient
from helpers.cluster import ClickHouseCluster, ClickHouseInstance
from helpers.client import CommandRequest
from helpers.cluster import ClickHouseCluster, ClickHouseInstance
def execute_keeper_client_query(

View File

@ -1,7 +1,7 @@
import importlib
import logging
import os
import time
import importlib
# Starts simple HTTP servers written in Python.

View File

@ -1,9 +1,10 @@
import ipaddress
import logging
import os
import subprocess
import time
import logging
import docker
import ipaddress
class PartitionManager:

View File

@ -1,5 +1,6 @@
import psycopg2
import time
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
postgres_table_template = """

View File

@ -1,6 +1,6 @@
import time
import random
from typing import Type, List
import time
from typing import List, Type
def retry(

View File

@ -1,15 +1,14 @@
import logging
import sys
import threading
import random
import time
import urllib.parse
import http.server
import logging
import random
import socket
import socketserver
import string
import socket
import struct
import sys
import threading
import time
import urllib.parse
INF_COUNT = 100000000

View File

@ -1,12 +1,11 @@
from minio import Minio
import glob
import os
import json
import os
import shutil
from enum import Enum
from minio import Minio
class CloudUploader:

View File

@ -1,7 +1,6 @@
import os
import time
ALL_HTTP_METHODS = {"POST", "PUT", "GET", "HEAD", "CONNECT"}

View File

@ -1,6 +1,6 @@
import difflib
import time
import logging
import time
from io import IOBase

View File

@ -15,9 +15,9 @@ import os
import pty
import re
import time
from queue import Queue, Empty
from queue import Empty, Queue
from subprocess import Popen
from threading import Thread, Event
from threading import Event, Thread
class TimeoutError(Exception):

View File

@ -1,5 +1,5 @@
import string
import random
import string
import threading

View File

@ -1,4 +1,5 @@
import time
from helpers.test_tools import assert_eq_with_retry

View File

@ -14,7 +14,9 @@
# asynchronous_metrics_update_period_s.xml.
import logging
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,9 +1,10 @@
import pytest
from helpers.client import Client
from helpers.cluster import ClickHouseCluster
import os.path
from os import remove
import pytest
from helpers.client import Client
from helpers.cluster import ClickHouseCluster
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
MAX_RETRY = 5

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,5 +1,7 @@
import pytest
import uuid
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,9 +1,9 @@
import pytest
import random
import string
from helpers.cluster import ClickHouseCluster
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)
node = cluster.add_instance(

View File

@ -1,5 +1,7 @@
import pytest
import logging
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster, is_arm
cluster = ClickHouseCluster(__file__)

View File

@ -1,4 +1,5 @@
import pytest
from helpers.client import QueryRuntimeException
from helpers.cluster import ClickHouseCluster

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import assert_eq_with_retry

View File

@ -1,9 +1,9 @@
import logging
import random
import threading
import time
import pytest
import threading
import random
from helpers.client import QueryRuntimeException
from helpers.cluster import ClickHouseCluster

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,6 +1,7 @@
#!/usr/bin/env python3
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV

View File

@ -1,6 +1,7 @@
import time
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import assert_eq_with_retry

View File

@ -1,7 +1,8 @@
import uuid
import time
import uuid
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV

View File

@ -1,6 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,12 +1,12 @@
import copy
import logging
import pytest
import random
import timeit
from itertools import repeat
from math import floor
from multiprocessing import Pool
from itertools import repeat
import pytest
from helpers.cluster import ClickHouseCluster

View File

@ -1,5 +1,7 @@
import pytest
import random
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import assert_eq_with_retry

View File

@ -1,6 +1,7 @@
import time
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,6 +1,7 @@
import time
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,5 +1,7 @@
import pytest
import time
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,6 +1,7 @@
import time
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.network import PartitionManager

View File

@ -2,6 +2,7 @@
# pylint: disable=line-too-long
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import assert_eq_with_retry

View File

@ -1,11 +1,12 @@
import re
import os
import logging
import os
import re
from pathlib import Path
import pytest
from minio.error import S3Error
from helpers.cluster import ClickHouseCluster
from minio.error import S3Error
from pathlib import Path
cluster = ClickHouseCluster(__file__)

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,11 +1,12 @@
import time
import pytest
import logging
import time
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import assert_eq_with_retry
from helpers.network import PartitionManager
from helpers.corrupt_part_data_on_disk import corrupt_part_data_by_path
from helpers.network import PartitionManager
from helpers.test_tools import assert_eq_with_retry
def fill_node(node):

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,20 +1,21 @@
#!/usr/bin/env python3
import gzip
import io
import json
import logging
import os
import io
import random
import threading
import time
from azure.storage.blob import BlobServiceClient
import helpers.client
import pytest
from azure.storage.blob import BlobServiceClient
import helpers.client
from helpers.cluster import ClickHouseCluster, ClickHouseInstance
from helpers.network import PartitionManager
from helpers.mock_servers import start_mock_servers
from helpers.network import PartitionManager
from helpers.test_tools import exec_query_with_retry

View File

@ -4,9 +4,9 @@ import random
import string
import pytest
from azure.storage.blob import BlobServiceClient
from helpers.cluster import ClickHouseCluster
from azure.storage.blob import BlobServiceClient
from test_storage_azure_blob_storage.test import azure_query
NODE_NAME = "node"

View File

@ -1,9 +1,10 @@
import logging
import pytest
from helpers.cluster import ClickHouseCluster
from test_storage_azure_blob_storage.test import azure_query
import os
import pytest
from helpers.cluster import ClickHouseCluster
from test_storage_azure_blob_storage.test import azure_query
logging.getLogger().setLevel(logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler())

View File

@ -1,8 +1,8 @@
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV
cluster = ClickHouseCluster(__file__)
instance = cluster.add_instance(

View File

@ -1,7 +1,8 @@
import logging
import os.path
import pytest
import logging
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV

View File

@ -1,20 +1,21 @@
#!/usr/bin/env python3
import gzip
import io
import json
import logging
import os
import io
import random
import threading
import time
from azure.storage.blob import BlobServiceClient
import helpers.client
import pytest
from azure.storage.blob import BlobServiceClient
import helpers.client
from helpers.cluster import ClickHouseCluster, ClickHouseInstance
from helpers.network import PartitionManager
from helpers.mock_servers import start_mock_servers
from helpers.network import PartitionManager
from helpers.test_tools import exec_query_with_retry

View File

@ -1,7 +1,8 @@
from time import sleep
import pytest
from helpers.cluster import ClickHouseCluster
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,14 +1,15 @@
import glob
import os.path
import pytest
import random
import re
import sys
import uuid
from collections import namedtuple
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import assert_eq_with_retry, TSV
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry
script_dir = os.path.dirname(os.path.realpath(__file__))

View File

@ -1,9 +1,10 @@
import re
import uuid
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry
import uuid
import re
cluster = ClickHouseCluster(__file__)

View File

@ -1,8 +1,9 @@
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry
import uuid
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry
cluster = ClickHouseCluster(__file__)

View File

@ -1,11 +1,13 @@
import pytest
import re
import os.path
import random, string
import random
import re
import string
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry
cluster = ClickHouseCluster(__file__)
main_configs = [

View File

@ -1,12 +1,13 @@
from random import random, randint
import pytest
import concurrent
import os.path
import time
import concurrent
from random import randint, random
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry
cluster = ClickHouseCluster(__file__)
num_nodes = 10

View File

@ -1,11 +1,13 @@
from random import randint
import pytest
import os.path
import time
import concurrent
import os.path
import re
import time
from random import randint
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry
import re
cluster = ClickHouseCluster(__file__)

View File

@ -3,7 +3,6 @@ import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV, assert_eq_with_retry, exec_query_with_retry
cluster = ClickHouseCluster(__file__)
main_configs = [

View File

@ -1,8 +1,8 @@
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV
cluster = ClickHouseCluster(__file__)
main_configs = [

View File

@ -1,10 +1,11 @@
import os
import uuid
from typing import Dict
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV
import uuid
import os
CONFIG_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "configs")

View File

@ -1,6 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.cluster import ClickHouseCluster
backup_id_counter = 0

View File

@ -1,4 +1,5 @@
import pytest
from helpers.cluster import ClickHouseCluster
cluster = ClickHouseCluster(__file__)

View File

@ -1,6 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance(

View File

@ -1,6 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance(

View File

@ -1,6 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance(

View File

@ -1,5 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
node = cluster.add_instance(

View File

@ -1,6 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance("node1", with_zookeeper=False, use_old_analyzer=True)

View File

@ -4,9 +4,11 @@
# pylint: disable=redefined-outer-name
import logging
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.client import QueryRuntimeException
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
upstream = cluster.add_instance("upstream", use_old_analyzer=True)

View File

@ -4,7 +4,7 @@
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
upstream_node = cluster.add_instance("upstream_node", use_old_analyzer=True)

View File

@ -1,6 +1,6 @@
import pytest
from helpers.cluster import ClickHouseCluster, CLICKHOUSE_CI_MIN_TESTED_VERSION
from helpers.cluster import CLICKHOUSE_CI_MIN_TESTED_VERSION, ClickHouseCluster
cluster = ClickHouseCluster(__file__)
# Version 21.6.3.14 has incompatible partition id for tables with UUID in partition key.

Some files were not shown because too many files have changed in this diff.