Move integration tests docker files to docker/ directory. Rename image -> runner.

Yatsishin Ilya 2020-04-17 19:01:16 +03:00
parent 165691045b
commit a0ce15befd
20 changed files with 57 additions and 49 deletions


@@ -1,5 +1,5 @@
 ## ClickHouse Dockerfiles
-This directory contain Dockerfiles for `clickhouse-client` and `clickhouse-server`. They updated each release.
+This directory contain Dockerfiles for `clickhouse-client` and `clickhouse-server`. They are updated in each release.
 Also there is bunch of images for testing and CI. They are listed in `images.json` file and updated on each commit to master. If you need to add another image, place information about it into `images.json`.


@@ -15,5 +15,5 @@
     "docker/test/stress": "yandex/clickhouse-stress-test",
     "docker/test/split_build_smoke_test": "yandex/clickhouse-split-build-smoke-test",
     "docker/test/codebrowser": "yandex/clickhouse-codebrowser",
-    "tests/integration/image": "yandex/clickhouse-integration-tests-runner"
+    "docker/test/integration/runner": "yandex/clickhouse-integration-tests-runner"
 }
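As context for the mapping above, here is a brief sketch (illustrative only, not the actual CI tooling) of how a script could walk `images.json` and rebuild each listed image from its directory, including the renamed `docker/test/integration/runner` entry:

```python
# Sketch only: rebuild every image listed in docker/images.json.
# The real CI build pipeline is not part of this commit.
import json
import subprocess

with open('docker/images.json') as f:
    images = json.load(f)  # {"<dockerfile dir>": "<image name>", ...}

for context_dir, image_name in images.items():
    subprocess.check_call(['docker', 'build', '-t', image_name, context_dir])
```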


@@ -0,0 +1,6 @@
+## Docker containers for integration tests
+- `base` container with required packages
+- `runner` container with that runs integration tests in docker
+- `compose` contains docker_compose YaML files that are used in tests
+How to run integration tests is described in tests/integration/README.md
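To make the new `compose` directory concrete, here is a minimal sketch (not part of the commit) of how code under `tests/integration/helpers/` can resolve those files; it mirrors the `CLICKHOUSE_ROOT_DIR` and `DOCKER_COMPOSE_DIR` constants that this commit adds to `cluster.py` further down.

```python
# Sketch: locate the relocated docker-compose files from the helpers directory,
# the same way the updated helpers/cluster.py does (see the diff below).
import os.path as p

CLICKHOUSE_ROOT_DIR = p.join(p.dirname(__file__), "../../..")
DOCKER_COMPOSE_DIR = p.join(CLICKHOUSE_ROOT_DIR, "docker/test/integration/compose/")

# e.g. the ZooKeeper compose file used by ClickHouseCluster
zookeeper_compose = p.join(DOCKER_COMPOSE_DIR, 'docker_compose_zookeeper.yml')
```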


@@ -41,32 +41,32 @@ ENV DOCKER_CHANNEL stable
 ENV DOCKER_VERSION 17.09.1-ce
 RUN set -eux; \
     \
     # this "case" statement is generated via "update.sh"
     \
     if ! wget -O docker.tgz "https://download.docker.com/linux/static/${DOCKER_CHANNEL}/x86_64/docker-${DOCKER_VERSION}.tgz"; then \
         echo >&2 "error: failed to download 'docker-${DOCKER_VERSION}' from '${DOCKER_CHANNEL}' for '${x86_64}'"; \
         exit 1; \
     fi; \
     \
     tar --extract \
         --file docker.tgz \
         --strip-components 1 \
         --directory /usr/local/bin/ \
     ; \
     rm docker.tgz; \
     \
     dockerd --version; \
     docker --version
 COPY modprobe.sh /usr/local/bin/modprobe
 COPY dockerd-entrypoint.sh /usr/local/bin/
 RUN set -x \
     && addgroup --system dockremap \
     && adduser --system dockremap \
     && adduser dockremap dockremap \
     && echo 'dockremap:165536:65536' >> /etc/subuid \
     && echo 'dockremap:165536:65536' >> /etc/subgid
 VOLUME /var/lib/docker


@@ -9,10 +9,10 @@ set -eu
 # Docker often uses "modprobe -va foo bar baz"
 # so we ignore modules that start with "-"
 for module; do
     if [ "${module#-}" = "$module" ]; then
         ip link show "$module" || true
         lsmod | grep "$module" || true
     fi
 done
 # remove /usr/local/... from PATH so we can exec the real modprobe as a last resort


@@ -94,7 +94,7 @@ cd docker/test/integration
 docker build -t yandex/clickhouse-integration-test .
 ```
-The helper container used by the `runner` script is in `tests/integration/image/Dockerfile`.
+The helper container used by the `runner` script is in `docker/test/integration/runner/Dockerfile`.
 ### Adding new tests


@@ -28,6 +28,8 @@ from .client import Client
 from .hdfs_api import HDFSApi
 HELPERS_DIR = p.dirname(__file__)
+CLICKHOUSE_ROOT_DIR = p.join(p.dirname(__file__), "../../..")
+DOCKER_COMPOSE_DIR = p.join(CLICKHOUSE_ROOT_DIR, "docker/test/integration/compose/")
 DEFAULT_ENV_NAME = 'env_file'
 SANITIZER_SIGN = "=================="

@@ -174,14 +176,14 @@ class ClickHouseCluster:
         self.instances[name] = instance
         if ipv4_address is not None or ipv6_address is not None:
             self.with_net_trics = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_net.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_net.yml')])
         self.base_cmd.extend(['--file', instance.docker_compose_path])
         cmds = []
         if with_zookeeper and not self.with_zookeeper:
             if not zookeeper_docker_compose_path:
-                zookeeper_docker_compose_path = p.join(HELPERS_DIR, 'docker_compose_zookeeper.yml')
+                zookeeper_docker_compose_path = p.join(DOCKER_COMPOSE_DIR, 'docker_compose_zookeeper.yml')
             self.with_zookeeper = True
             self.base_cmd.extend(['--file', zookeeper_docker_compose_path])

@@ -191,72 +193,72 @@ class ClickHouseCluster:
         if with_mysql and not self.with_mysql:
             self.with_mysql = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_mysql.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_mysql.yml')])
             self.base_mysql_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_mysql.yml')]
+                self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_mysql.yml')]
             cmds.append(self.base_mysql_cmd)
         if with_postgres and not self.with_postgres:
             self.with_postgres = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_postgres.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_postgres.yml')])
             self.base_postgres_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_postgres.yml')]
+                self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_postgres.yml')]
             cmds.append(self.base_postgres_cmd)
         if with_odbc_drivers and not self.with_odbc_drivers:
             self.with_odbc_drivers = True
             if not self.with_mysql:
                 self.with_mysql = True
-                self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_mysql.yml')])
+                self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_mysql.yml')])
                 self.base_mysql_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                    self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_mysql.yml')]
+                    self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_mysql.yml')]
                 cmds.append(self.base_mysql_cmd)
             if not self.with_postgres:
                 self.with_postgres = True
-                self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_postgres.yml')])
+                self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_postgres.yml')])
                 self.base_postgres_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
                     self.project_name, '--file',
-                    p.join(HELPERS_DIR, 'docker_compose_postgres.yml')]
+                    p.join(DOCKER_COMPOSE_DIR, 'docker_compose_postgres.yml')]
                 cmds.append(self.base_postgres_cmd)
         if with_kafka and not self.with_kafka:
             self.with_kafka = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_kafka.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_kafka.yml')])
             self.base_kafka_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_kafka.yml')]
+                self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_kafka.yml')]
             cmds.append(self.base_kafka_cmd)
         if with_hdfs and not self.with_hdfs:
             self.with_hdfs = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_hdfs.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_hdfs.yml')])
             self.base_hdfs_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_hdfs.yml')]
+                self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_hdfs.yml')]
             cmds.append(self.base_hdfs_cmd)
         if with_mongo and not self.with_mongo:
             self.with_mongo = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_mongo.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_mongo.yml')])
             self.base_mongo_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_mongo.yml')]
+                self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_mongo.yml')]
             cmds.append(self.base_mongo_cmd)
         if self.with_net_trics:
             for cmd in cmds:
-                cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_net.yml')])
+                cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_net.yml')])
         if with_redis and not self.with_redis:
             self.with_redis = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_redis.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_redis.yml')])
             self.base_redis_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_redis.yml')]
+                self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_redis.yml')]
         if with_minio and not self.with_minio:
             self.with_minio = True
-            self.base_cmd.extend(['--file', p.join(HELPERS_DIR, 'docker_compose_minio.yml')])
+            self.base_cmd.extend(['--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_minio.yml')])
             self.base_minio_cmd = ['docker-compose', '--project-directory', self.base_dir, '--project-name',
-                self.project_name, '--file', p.join(HELPERS_DIR, 'docker_compose_minio.yml')]
+                self.project_name, '--file', p.join(DOCKER_COMPOSE_DIR, 'docker_compose_minio.yml')]
             cmds.append(self.base_minio_cmd)
         return instance
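A hypothetical test snippet (a sketch, not taken from the commit) illustrating the effect of this change: when a test asks for MySQL, the cluster now appends the compose file from `docker/test/integration/compose/` instead of the helpers directory. `ClickHouseCluster`, `add_instance`, `with_mysql` and `base_cmd` are the names from the diff above; the exact constructor call is an assumption.

```python
# Hypothetical usage sketch: requesting MySQL support makes ClickHouseCluster
# pull docker_compose_mysql.yml from the new compose directory.
from helpers.cluster import ClickHouseCluster

cluster = ClickHouseCluster(__file__)
node = cluster.add_instance('node', with_mysql=True)

# base_cmd should now reference
# docker/test/integration/compose/docker_compose_mysql.yml
assert any('docker_compose_mysql.yml' in arg for arg in cluster.base_cmd)
```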


@@ -5,7 +5,7 @@ import os
 import docker
-from .cluster import HELPERS_DIR
+from .cluster import CLICKHOUSE_ROOT_DIR
 class PartitionManager:

@@ -156,7 +156,7 @@ class _NetworkManager:
     def __init__(
             self,
             image_name='clickhouse_tests_helper',
-            image_path=p.join(HELPERS_DIR, 'helper_container'),
+            image_path=p.join(CLICKHOUSE_ROOT_DIR, 'docker', 'test', 'integration', 'helper_container'),
             container_expire_timeout=50, container_exit_timeout=60):
         self.container_expire_timeout = container_expire_timeout
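With this change `_NetworkManager` builds its `clickhouse_tests_helper` image from `docker/test/integration/helper_container` rather than from the helpers directory. Below is a hedged sketch, using the `docker` Python SDK that `network.py` already imports, of what building the image from the new path looks like; the real build call inside `_NetworkManager` may differ.

```python
# Sketch only: build the helper image from its relocated directory.
# Standard docker SDK calls; not the repository's exact code.
import os.path as p
import docker

CLICKHOUSE_ROOT_DIR = p.join(p.dirname(__file__), "../../..")
helper_dir = p.join(CLICKHOUSE_ROOT_DIR, 'docker', 'test', 'integration', 'helper_container')

client = docker.from_env()
image, _build_logs = client.images.build(path=helper_dir, tag='clickhouse_tests_helper')
print(image.id)
```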