Merge pull request #19706 from qoega/easier-integration-no-project-dir

commit 5afcc927ac by Ilya Yatsishin, 2021-01-30 17:22:00 +03:00, committed by GitHub
2 changed files with 41 additions and 46 deletions


@@ -113,9 +113,9 @@ class ClickHouseCluster:
         self.zookeeper_config_path = p.join(self.base_dir, zookeeper_config_path) if zookeeper_config_path else p.join(
             HELPERS_DIR, 'zookeeper_config.xml')
-        self.project_name = pwd.getpwuid(os.getuid()).pw_name + p.basename(self.base_dir) + self.name
+        project_name = pwd.getpwuid(os.getuid()).pw_name + p.basename(self.base_dir) + self.name
         # docker-compose removes everything non-alphanumeric from project names so we do it too.
-        self.project_name = re.sub(r'[^a-z0-9]', '', self.project_name.lower())
+        self.project_name = re.sub(r'[^a-z0-9]', '', project_name.lower())
         self.instances_dir = p.join(self.base_dir, '_instances' + ('' if not self.name else '_' + self.name))
         self.docker_logs_path = p.join(self.instances_dir, 'docker.log')
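The hunk above switches to a local intermediate variable so that only the sanitized value is ever stored on `self`. A minimal, self-contained sketch of that sanitization (paths and names below are illustrative, not taken from the diff):

```python
import os
import os.path as p
import pwd
import re

def make_project_name(base_dir, cluster_name):
    # docker-compose strips everything non-alphanumeric from project names,
    # so the cluster derives its own name the same way before passing it
    # via --project-name.
    raw = pwd.getpwuid(os.getuid()).pw_name + p.basename(base_dir) + cluster_name
    return re.sub(r'[^a-z0-9]', '', raw.lower())

# e.g. user 'root', test dir 'test_storage', cluster name 'secure'
print(make_project_name('/repo/tests/integration/test_storage', 'secure'))
```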
@@ -126,8 +126,8 @@ class ClickHouseCluster:
         self.base_cmd = ['docker-compose']
         if custom_dockerd_host:
             self.base_cmd += ['--host', custom_dockerd_host]
-        self.base_cmd += ['--project-name', self.project_name]
+        self.base_cmd += ['--project-directory', self.base_dir, '--project-name', self.project_name]
         self.base_zookeeper_cmd = None
         self.base_mysql_cmd = []
         self.base_kafka_cmd = []
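With `--project-directory` now set once on `base_cmd`, relative paths inside compose files (build contexts, env files, volume sources) resolve against the test's `base_dir` for every invocation derived from it, instead of being repeated on each per-service command. A hedged sketch of the resulting command shape, with illustrative values:

```python
# Illustrative values; the real ones come from the test environment.
base_dir = '/repo/tests/integration/test_example'
project_name = 'roottestexample'

base_cmd = ['docker-compose',
            '--project-directory', base_dir,   # relative paths resolve here
            '--project-name', project_name]    # one shared compose project

# Service compose files are still attached explicitly with --file:
cmd = base_cmd + ['--file', '/compose/docker_compose_mysql.yml', 'up', '-d']
print(' '.join(cmd))
```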
@@ -260,25 +260,23 @@ class ClickHouseCluster:
             self.with_zookeeper = True
             self.zookeeper_use_tmpfs = zookeeper_use_tmpfs
             self.base_cmd.extend(['--file', zookeeper_docker_compose_path])
-            self.base_zookeeper_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                       self.project_name, '--file', zookeeper_docker_compose_path]
+            self.base_zookeeper_cmd = ['docker-compose', '--project-name', self.project_name,
+                                       '--file', zookeeper_docker_compose_path]
             cmds.append(self.base_zookeeper_cmd)

         if with_mysql and not self.with_mysql:
             self.with_mysql = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_mysql.yml')])
-            self.base_mysql_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                   self.project_name, '--file',
-                                   p.join(docker_compose_yml_dir, 'docker_compose_mysql.yml')]
+            self.base_mysql_cmd = ['docker-compose', '--project-name', self.project_name,
+                                   '--file', p.join(docker_compose_yml_dir, 'docker_compose_mysql.yml')]
             cmds.append(self.base_mysql_cmd)

         if with_postgres and not self.with_postgres:
             self.with_postgres = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_postgres.yml')])
-            self.base_postgres_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                      self.project_name, '--file',
-                                      p.join(docker_compose_yml_dir, 'docker_compose_postgres.yml')]
+            self.base_postgres_cmd = ['docker-compose', '--project-name', self.project_name,
+                                      '--file', p.join(docker_compose_yml_dir, 'docker_compose_postgres.yml')]
             cmds.append(self.base_postgres_cmd)

         if with_odbc_drivers and not self.with_odbc_drivers:
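This and the following hunks repeat one substitution per service: the standalone per-service command drops `--project-directory` and keeps only `--project-name` plus `--file`. A hypothetical helper (not part of this PR) that captures the repeated shape, for illustration only:

```python
import os.path as p

def make_service_cmd(project_name, docker_compose_yml_dir, compose_file):
    # Shape of the per-service commands after this PR: no --project-directory,
    # just the shared project name and the service's compose file.
    return ['docker-compose', '--project-name', project_name,
            '--file', p.join(docker_compose_yml_dir, compose_file)]

base_mysql_cmd = make_service_cmd('roottestexample', '/compose', 'docker_compose_mysql.yml')
print(base_mysql_cmd)
```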
@@ -286,64 +284,57 @@ class ClickHouseCluster:
             if not self.with_mysql:
                 self.with_mysql = True
                 self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_mysql.yml')])
-                self.base_mysql_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                       self.project_name, '--file',
-                                       p.join(docker_compose_yml_dir, 'docker_compose_mysql.yml')]
+                self.base_mysql_cmd = ['docker-compose', '--project-name', self.project_name,
+                                       '--file', p.join(docker_compose_yml_dir, 'docker_compose_mysql.yml')]
                 cmds.append(self.base_mysql_cmd)

             if not self.with_postgres:
                 self.with_postgres = True
                 self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_postgres.yml')])
-                self.base_postgres_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                          self.project_name, '--file',
-                                          p.join(docker_compose_yml_dir, 'docker_compose_postgres.yml')]
+                self.base_postgres_cmd = ['docker-compose', '--project-name', self.project_name,
+                                          '--file', p.join(docker_compose_yml_dir, 'docker_compose_postgres.yml')]
                 cmds.append(self.base_postgres_cmd)

         if with_kafka and not self.with_kafka:
             self.with_kafka = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_kafka.yml')])
-            self.base_kafka_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                   self.project_name, '--file',
-                                   p.join(docker_compose_yml_dir, 'docker_compose_kafka.yml')]
+            self.base_kafka_cmd = ['docker-compose', '--project-name', self.project_name,
+                                   '--file', p.join(docker_compose_yml_dir, 'docker_compose_kafka.yml')]
             cmds.append(self.base_kafka_cmd)

         if with_kerberized_kafka and not self.with_kerberized_kafka:
             self.with_kerberized_kafka = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_kerberized_kafka.yml')])
-            self.base_kerberized_kafka_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                              self.project_name, '--file', p.join(docker_compose_yml_dir, 'docker_compose_kerberized_kafka.yml')]
+            self.base_kerberized_kafka_cmd = ['docker-compose','--project-name', self.project_name,
+                                              '--file', p.join(docker_compose_yml_dir, 'docker_compose_kerberized_kafka.yml')]
             cmds.append(self.base_kerberized_kafka_cmd)

         if with_rabbitmq and not self.with_rabbitmq:
             self.with_rabbitmq = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_rabbitmq.yml')])
-            self.base_rabbitmq_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                      self.project_name, '--file',
-                                      p.join(docker_compose_yml_dir, 'docker_compose_rabbitmq.yml')]
+            self.base_rabbitmq_cmd = ['docker-compose', '--project-name', self.project_name,
+                                      '--file', p.join(docker_compose_yml_dir, 'docker_compose_rabbitmq.yml')]
             cmds.append(self.base_rabbitmq_cmd)

         if with_hdfs and not self.with_hdfs:
             self.with_hdfs = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_hdfs.yml')])
-            self.base_hdfs_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                  self.project_name, '--file',
-                                  p.join(docker_compose_yml_dir, 'docker_compose_hdfs.yml')]
+            self.base_hdfs_cmd = ['docker-compose', '--project-name', self.project_name,
+                                  '--file', p.join(docker_compose_yml_dir, 'docker_compose_hdfs.yml')]
             cmds.append(self.base_hdfs_cmd)

         if with_kerberized_hdfs and not self.with_kerberized_hdfs:
             self.with_kerberized_hdfs = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_kerberized_hdfs.yml')])
-            self.base_kerberized_hdfs_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                             self.project_name, '--file',
-                                             p.join(docker_compose_yml_dir, 'docker_compose_kerberized_hdfs.yml')]
+            self.base_kerberized_hdfs_cmd = ['docker-compose', '--project-name', self.project_name,
+                                             '--file', p.join(docker_compose_yml_dir, 'docker_compose_kerberized_hdfs.yml')]
             cmds.append(self.base_kerberized_hdfs_cmd)

         if with_mongo and not self.with_mongo:
             self.with_mongo = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_mongo.yml')])
-            self.base_mongo_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                   self.project_name, '--file',
-                                   p.join(docker_compose_yml_dir, 'docker_compose_mongo.yml')]
+            self.base_mongo_cmd = ['docker-compose', '--project-name', self.project_name,
+                                   '--file', p.join(docker_compose_yml_dir, 'docker_compose_mongo.yml')]
             cmds.append(self.base_mongo_cmd)

         if self.with_net_trics:
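Each `base_*_cmd` list is later executed with additional docker-compose arguments appended (for example `up -d`). A minimal usage sketch, assuming a command list shaped like the ones built above:

```python
import subprocess

# Assumed command list, matching the shape produced by the hunks above.
base_mongo_cmd = ['docker-compose', '--project-name', 'roottestexample',
                  '--file', '/compose/docker_compose_mongo.yml']

# Start the service containers in the background; raises CalledProcessError
# on a non-zero exit code.
subprocess.check_call(base_mongo_cmd + ['up', '-d'])
```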
@@ -353,26 +344,25 @@ class ClickHouseCluster:
         if with_redis and not self.with_redis:
             self.with_redis = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_redis.yml')])
-            self.base_redis_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                   self.project_name, '--file',
-                                   p.join(docker_compose_yml_dir, 'docker_compose_redis.yml')]
+            self.base_redis_cmd = ['docker-compose', '--project-name', self.project_name,
+                                   '--file', p.join(docker_compose_yml_dir, 'docker_compose_redis.yml')]

         if with_minio and not self.with_minio:
             self.with_minio = True
             self.minio_certs_dir = minio_certs_dir
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_minio.yml')])
-            self.base_minio_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                   self.project_name, '--file',
-                                   p.join(docker_compose_yml_dir, 'docker_compose_minio.yml')]
+            self.base_minio_cmd = ['docker-compose', '--project-name', self.project_name,
+                                   '--file', p.join(docker_compose_yml_dir, 'docker_compose_minio.yml')]
             cmds.append(self.base_minio_cmd)

         if with_cassandra and not self.with_cassandra:
             self.with_cassandra = True
             self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_cassandra.yml')])
-            self.base_cassandra_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                                       self.project_name, '--file',
-                                       p.join(docker_compose_yml_dir, 'docker_compose_cassandra.yml')]
+            self.base_cassandra_cmd = ['docker-compose', '--project-name', self.project_name,
+                                       '--file', p.join(docker_compose_yml_dir, 'docker_compose_cassandra.yml')]

+        print("Cluster name:{} project_name:{}. Added instance name:{} tag:{} base_cmd:{} docker_compose_yml_dir:{}".format(
+            self.name, self.project_name, name, tag, self.base_cmd, docker_compose_yml_dir))

         return instance

     def get_instance_docker_id(self, instance_name):
@@ -407,7 +397,10 @@ class ClickHouseCluster:
         return node

     def get_instance_ip(self, instance_name):
+        print("get_instance_ip instance_name={}".format(instance_name))
         docker_id = self.get_instance_docker_id(instance_name)
+        # for cont in self.docker_client.containers.list():
+        #     print("CONTAINERS LIST: ID={} NAME={} STATUS={}".format(cont.id, cont.name, cont.status))
         handle = self.docker_client.containers.get(docker_id)
         return list(handle.attrs['NetworkSettings']['Networks'].values())[0]['IPAddress']
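The IP lookup reads the first network attached to the container from the Docker inspect data. A standalone sketch of the same access pattern using the docker-py SDK (the container name is an assumption):

```python
import docker

client = docker.from_env()
container = client.containers.get('roottestexample_node1_1')  # assumed name

# 'Networks' maps network name -> settings; a single-network container has
# exactly one entry, so take the first value's IPAddress.
networks = container.attrs['NetworkSettings']['Networks']
print(list(networks.values())[0]['IPAddress'])
```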
@@ -936,7 +929,7 @@ class ClickHouseInstance:
         self.with_cassandra = with_cassandra

         self.path = p.join(self.cluster.instances_dir, name)
-        self.docker_compose_path = p.join(self.path, 'docker_compose.yml')
+        self.docker_compose_path = p.join(self.path, 'docker-compose.yml')
         self.env_variables = env_variables or {}
         if with_odbc_drivers:
             self.odbc_ini_path = self.path + "/odbc.ini:/etc/odbc.ini"
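The rename from `docker_compose.yml` to `docker-compose.yml` is not cosmetic: `docker-compose.yml` is the default filename docker-compose looks for when no `--file` is given, so the per-instance file now matches the convention. Sketch of the path construction (the instance path is illustrative):

```python
import os.path as p

instances_dir = '/repo/tests/integration/test_example/_instances'  # assumed
name = 'node1'

path = p.join(instances_dir, name)
# Matches docker-compose's conventional default filename.
docker_compose_path = p.join(path, 'docker-compose.yml')
print(docker_compose_path)
```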
@@ -1163,14 +1156,14 @@ class ClickHouseInstance:
             if status == 'exited':
                 raise Exception(
                     "Instance `{}' failed to start. Container status: {}, logs: {}".format(self.name, status,
-                                                                                           handle.logs()))
+                                                                                           handle.logs().decode('utf-8')))

             current_time = time.time()
             time_left = deadline - current_time
             if deadline is not None and current_time >= deadline:
                 raise Exception("Timed out while waiting for instance `{}' with ip address {} to start. "
                                 "Container status: {}, logs: {}".format(self.name, self.ip_address, status,
-                                                                        handle.logs()))
+                                                                        handle.logs().decode('utf-8')))

             # Repeatedly poll the instance address until there is something that listens there.
             # Usually it means that ClickHouse is ready to accept queries.
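The `.decode('utf-8')` matters because docker-py's `Container.logs()` returns `bytes`; without decoding, the exception message renders the logs as `b'...'` noise. A minimal sketch of the failure path, with an assumed container name:

```python
import docker

client = docker.from_env()
handle = client.containers.get('roottestexample_node1_1')  # assumed name

status = handle.status
if status == 'exited':
    # logs() returns bytes; decode before interpolating into the message.
    raise Exception("Instance failed to start. Container status: {}, logs: {}"
                    .format(status, handle.logs().decode('utf-8')))
```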


@@ -105,6 +105,8 @@ def test_kafka_json_as_string(kafka_cluster):
                      kafka_flush_interval_ms=1000;
             ''')
+    time.sleep(3)
+
     result = instance.query('SELECT * FROM test.kafka;')
     expected = '''\
 {"t": 123, "e": {"x": "woof"} }