diff --git a/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml b/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml
index e3e0d5d07ce..a65ef629df6 100644
--- a/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml
+++ b/docker/test/integration/runner/compose/docker_compose_jdbc_bridge.yml
@@ -14,10 +14,14 @@ services:
             }
             EOF
             ./docker-entrypoint.sh'
-        ports:
-            - 9020:9019
+        expose:
+            - 9019
         healthcheck:
            test: ["CMD", "curl", "-s", "localhost:9019/ping"]
            interval: 5s
            timeout: 3s
            retries: 30
+        volumes:
+            - type: ${JDBC_BRIDGE_FS:-tmpfs}
+              source: ${JDBC_BRIDGE_LOGS:-}
+              target: /app/logs
\ No newline at end of file
diff --git a/tests/integration/helpers/cluster.py b/tests/integration/helpers/cluster.py
index ab5d32751bd..c6bd93edbbc 100644
--- a/tests/integration/helpers/cluster.py
+++ b/tests/integration/helpers/cluster.py
@@ -392,6 +392,13 @@ class ClickHouseCluster:
         self.zookeeper_instance_dir_prefix = p.join(self.instances_dir, "zk")
         self.zookeeper_dirs_to_create = []
 
+        # available when with_jdbc_bridge == True
+        self.jdbc_bridge_host = "bridge1"
+        self.jdbc_bridge_ip = None
+        self.jdbc_bridge_port = 9019
+        self.jdbc_driver_dir = p.abspath(p.join(self.instances_dir, "jdbc_driver"))
+        self.jdbc_driver_logs_dir = os.path.join(self.jdbc_driver_dir, "logs")
+
         self.docker_client = None
         self.is_up = False
         self.env = os.environ.copy()
@@ -700,6 +707,8 @@ class ClickHouseCluster:
 
     def setup_jdbc_bridge_cmd(self, instance, env_variables, docker_compose_yml_dir):
         self.with_jdbc_bridge = True
+        env_variables['JDBC_DRIVER_LOGS'] = self.jdbc_driver_logs_dir
+        env_variables['JDBC_DRIVER_FS'] = "bind"
         self.base_cmd.extend(['--file', p.join(docker_compose_yml_dir, 'docker_compose_jdbc_bridge.yml')])
         self.base_jdbc_bridge_cmd = ['docker-compose', '--env-file', instance.env_file, '--project-name', self.project_name,
                                      '--file', p.join(docker_compose_yml_dir, 'docker_compose_jdbc_bridge.yml')]
@@ -1512,8 +1521,12 @@ class ClickHouseCluster:
             self.wait_cassandra_to_start()
 
         if self.with_jdbc_bridge and self.base_jdbc_bridge_cmd:
+            os.makedirs(self.jdbc_driver_logs_dir)
+            os.chmod(self.jdbc_driver_logs_dir, stat.S_IRWXO)
+
             subprocess_check_call(self.base_jdbc_bridge_cmd + ['up', '-d'])
-            self.wait_for_url("http://localhost:9020/ping")
+            self.jdbc_bridge_ip = self.get_instance_ip(self.jdbc_bridge_host)
+            self.wait_for_url(f"http://{self.jdbc_bridge_ip}:{self.jdbc_bridge_port}/ping")
 
         clickhouse_start_cmd = self.base_cmd + ['up', '-d', '--no-recreate']
         logging.debug(("Trying to create ClickHouse instance by command %s", ' '.join(map(str, clickhouse_start_cmd))))
diff --git a/tests/integration/test_jdbc_bridge/test.py b/tests/integration/test_jdbc_bridge/test.py
index 5972cfd7a5e..b5304c4cb10 100644
--- a/tests/integration/test_jdbc_bridge/test.py
+++ b/tests/integration/test_jdbc_bridge/test.py
@@ -1,7 +1,6 @@
-import contextlib
+import logging
 import os.path as p
 import pytest
-import time
 import uuid
 
 from helpers.cluster import ClickHouseCluster
@@ -23,6 +22,14 @@ def started_cluster():
             INSERT INTO test.ClickHouseTable(Num, Str)
             SELECT number, toString(number) FROM system.numbers LIMIT {};
         '''.format(records))
+
+        while True:
+            datasources = instance.query("select * from jdbc('', 'show datasources')")
+            if 'self' in datasources:
+                logging.debug(f"JDBC Driver self datasource initialized.\n{datasources}")
+                break
+            else:
+                logging.debug(f"Waiting JDBC Driver to initialize 'self' datasource.\n{datasources}")
         yield cluster
     finally:
         cluster.shutdown()
@@ -52,8 +59,9 @@ def test_jdbc_distributed_query(started_cluster):
 
 def test_jdbc_insert(started_cluster):
     """Test insert query using JDBC table function"""
+    instance.query('DROP TABLE IF EXISTS test.test_insert')
     instance.query('''
-        CREATE TABLE test.test_insert engine = Memory AS
+        CREATE TABLE test.test_insert ENGINE = Memory AS
             SELECT * FROM test.ClickHouseTable;
         SELECT *
         FROM jdbc('{0}?mutation', 'INSERT INTO test.test_insert VALUES({1}, ''{1}'', ''{1}'')');
@@ -67,8 +75,9 @@ def test_jdbc_insert(started_cluster):
 def test_jdbc_update(started_cluster):
     """Test update query using JDBC table function"""
     secrets = str(uuid.uuid1())
+    instance.query('DROP TABLE IF EXISTS test.test_update')
     instance.query('''
-        CREATE TABLE test.test_update engine = Memory AS
+        CREATE TABLE test.test_update ENGINE = Memory AS
             SELECT * FROM test.ClickHouseTable;
         SELECT *
         FROM jdbc(
@@ -85,8 +94,9 @@ def test_jdbc_update(started_cluster):
 
 def test_jdbc_delete(started_cluster):
     """Test delete query using JDBC table function"""
+    instance.query('DROP TABLE IF EXISTS test.test_delete')
     instance.query('''
-        CREATE TABLE test.test_delete engine = Memory AS
+        CREATE TABLE test.test_delete ENGINE = Memory AS
             SELECT * FROM test.ClickHouseTable;
         SELECT *
         FROM jdbc(
@@ -102,6 +112,7 @@ def test_jdbc_delete(started_cluster):
 
 def test_jdbc_table_engine(started_cluster):
     """Test query against a JDBC table"""
+    instance.query('DROP TABLE IF EXISTS test.jdbc_table')
     actual = instance.query('''
         CREATE TABLE test.jdbc_table(Str String)
         ENGINE = JDBC('{}', 'test', 'ClickHouseTable');