Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-09 17:14:47 +00:00)
Update integration tests runner too
commit ead81879b4 (parent edcf981c48)
@@ -2,4 +2,7 @@
 # Helper docker container to run iptables without sudo
 
 FROM alpine
-RUN apk add -U iproute2
+RUN apk add --no-cache -U iproute2 \
+    && for bin in iptables iptables-restore iptables-save; \
+    do ln -sf xtables-nft-multi "/sbin/$bin"; \
+    done
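The new RUN line switches the helper image to the nftables backend: each iptables binary becomes a symlink to xtables-nft-multi, which dispatches on the name it was invoked as. A minimal sketch (not part of the commit) that verifies the symlinks from inside such a container:

import os

def uses_nft_backend(sbin="/sbin"):
    """Check that every iptables entry point dispatches via xtables-nft-multi."""
    binaries = ["iptables", "iptables-restore", "iptables-save"]
    return all(
        os.path.islink(os.path.join(sbin, b))
        and os.readlink(os.path.join(sbin, b)) == "xtables-nft-multi"
        for b in binaries
    )

if __name__ == "__main__":
    print(uses_nft_backend())  # True inside the rebuilt helper image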
@@ -1,5 +1,5 @@
 # docker build -t clickhouse/integration-tests-runner .
-FROM ubuntu:20.04
+FROM ubuntu:22.04
 
 # ARG for quick switch to a given ubuntu mirror
 ARG apt_archive="http://archive.ubuntu.com"
@@ -56,17 +56,19 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \
         /var/lib/apt/lists/* \
         /var/cache/debconf \
         /tmp/* \
-    && apt-get clean
+    && apt-get clean \
+    && dockerd --version; docker --version
 
-RUN dockerd --version; docker --version
-
 RUN python3 -m pip install --no-cache-dir \
     PyMySQL \
-    aerospike==4.0.0 \
+    aerospike==11.1.0 \
+    asyncio \
     avro==1.10.2 \
     azure-storage-blob \
     cassandra-driver \
-    confluent-kafka==1.5.0 \
+    confluent-kafka==1.9.2 \
+    delta-spark==2.3.0 \
     dict2xml \
     dicttoxml \
     docker \
@@ -76,47 +78,47 @@ RUN python3 -m pip install --no-cache-dir \
     kafka-python \
     kazoo \
     lz4 \
+    meilisearch==0.18.3 \
     minio \
     nats-py \
     protobuf \
-    psycopg2-binary==2.8.6 \
+    psycopg2-binary==2.9.6 \
+    pyhdfs \
     pymongo==3.11.0 \
+    pyspark==3.3.2 \
     pytest \
     pytest-order==1.0.0 \
-    pytest-timeout \
     pytest-random \
-    pytest-xdist \
     pytest-repeat \
+    pytest-timeout \
+    pytest-xdist \
     pytz \
     redis \
-    tzlocal==2.1 \
-    urllib3 \
     requests-kerberos \
-    pyspark==3.3.2 \
-    delta-spark==2.2.0 \
-    pyhdfs \
-    azure-storage-blob \
-    meilisearch==0.18.3
-
-COPY modprobe.sh /usr/local/bin/modprobe
-COPY dockerd-entrypoint.sh /usr/local/bin/
-COPY compose/ /compose/
-COPY misc/ /misc/
+    tzlocal==2.1 \
+    urllib3
 
 # Hudi supports only spark 3.3.*, not 3.4
 RUN curl -fsSL -O https://dlcdn.apache.org/spark/spark-3.3.2/spark-3.3.2-bin-hadoop3.tgz \
     && tar xzvf spark-3.3.2-bin-hadoop3.tgz -C / \
     && rm spark-3.3.2-bin-hadoop3.tgz
 
 # download spark and packages
 # if you change packages, don't forget to update them in tests/integration/helpers/cluster.py
-RUN echo ":quit" | /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.2.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" > /dev/null
+RUN echo ":quit" | /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.3.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" > /dev/null
 
 RUN set -x \
     && addgroup --system dockremap \
     && adduser --system dockremap \
+    && adduser dockremap dockremap \
     && echo 'dockremap:165536:65536' >> /etc/subuid \
     && echo 'dockremap:165536:65536' >> /etc/subgid
 
+COPY modprobe.sh /usr/local/bin/modprobe
+COPY dockerd-entrypoint.sh /usr/local/bin/
+COPY compose/ /compose/
+COPY misc/ /misc/
 
 # Same options as in test/base/Dockerfile
 # (in case you need to override them in tests)
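The dockremap user and the matching /etc/subuid and /etc/subgid ranges are what dockerd's userns-remap mode consumes: UIDs inside a container are offset into an unprivileged range on the host. A hypothetical helper (not in the commit) showing the arithmetic implied by the 'dockremap:165536:65536' entry:

SUBUID_START, SUBUID_COUNT = 165536, 65536

def host_uid(container_uid: int) -> int:
    """Map a container UID to its host UID under the dockremap range."""
    if not 0 <= container_uid < SUBUID_COUNT:
        raise ValueError("UID outside the remapped range")
    return SUBUID_START + container_uid

print(host_uid(0))  # 165536: container root is an unprivileged host UID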
@@ -3054,7 +3054,6 @@ CLICKHOUSE_STAY_ALIVE_COMMAND = "bash -c \"trap 'pkill tail' INT TERM; {} --daem
     CLICKHOUSE_START_COMMAND
 )
 
-# /run/xtables.lock passed inside for correct iptables --wait
 DOCKER_COMPOSE_TEMPLATE = """
 version: '2.3'
 services:
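DOCKER_COMPOSE_TEMPLATE is a str.format template: the {db_dir}-style slots are filled per instance when the compose file is written out. A reduced sketch of that rendering (paths here are placeholders, not values from the diff):

TEMPLATE = """
version: '2.3'
services:
    instance:
        volumes:
            - {db_dir}:/var/lib/clickhouse/
            - {logs_dir}:/var/log/clickhouse-server/
"""

print(TEMPLATE.format(db_dir="/tmp/ch/db", logs_dir="/tmp/ch/logs"))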
@@ -3066,7 +3065,6 @@ services:
             - {db_dir}:/var/lib/clickhouse/
             - {logs_dir}:/var/log/clickhouse-server/
             - /etc/passwd:/etc/passwd:ro
-            - /run/xtables.lock:/run/xtables.lock:ro
             {binary_volume}
             {odbc_bridge_volume}
             {library_bridge_volume}
@@ -271,10 +271,6 @@ class _NetworkManager:
             image_name,
             auto_remove=True,
             command=("sleep %s" % self.container_exit_timeout),
-            # /run/xtables.lock passed inside for correct iptables --wait
-            volumes={
-                "/run/xtables.lock": {"bind": "/run/xtables.lock", "mode": "ro"}
-            },
             detach=True,
             network_mode="host",
         )
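With iptables dispatching through the nft backend, the helper container no longer needs the host's /run/xtables.lock, so the bind mount is dropped from the docker-py call. A standalone sketch of an equivalent call (image tag and sleep duration are illustrative; assumes a reachable Docker daemon):

import docker

client = docker.from_env()
container = client.containers.run(
    "clickhouse/integration-helper:latest",  # hypothetical tag
    command="sleep 30",
    auto_remove=True,     # clean the container up once it exits
    detach=True,          # return a Container handle immediately
    network_mode="host",  # share the host network namespace
)
print(container.name)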
@@ -19,3 +19,6 @@ markers =
     long_run: marks tests which run for a long time
 addopts =
     -m 'not long_run'
+; 'The asyncore module is deprecated' comes from the cassandra driver
+filterwarnings =
+    ignore:The asyncore module is deprecated:DeprecationWarning
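The filterwarnings entry follows the standard action:message:category syntax that pytest shares with Python's -W option. In plain Python the same suppression would read roughly like this (a sketch, not code from the commit):

import warnings

warnings.filterwarnings(
    "ignore",
    message="The asyncore module is deprecated",  # matched as a regex
    category=DeprecationWarning,
)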
@@ -395,39 +395,27 @@ if __name__ == "__main__":
     if args.keyword_expression:
         args.pytest_args += ["-k", args.keyword_expression]
 
-    cmd_base = "docker run {net} {tty} --rm --name {name} --privileged \
-        --volume={odbc_bridge_bin}:/clickhouse-odbc-bridge --volume={bin}:/clickhouse \
-        --volume={library_bridge_bin}:/clickhouse-library-bridge \
-        --volume={base_cfg}:/clickhouse-config --volume={cases_dir}:/ClickHouse/tests/integration \
-        --volume={src_dir}/Server/grpc_protos:/ClickHouse/src/Server/grpc_protos \
-        --volume=/run:/run/host:ro \
-        {dockerd_internal_volume} -e DOCKER_CLIENT_TIMEOUT=300 -e COMPOSE_HTTP_TIMEOUT=600 \
-        -e XTABLES_LOCKFILE=/run/host/xtables.lock \
-        -e PYTHONUNBUFFERED=1 \
-        {env_tags} {env_cleanup} -e PYTEST_OPTS='{parallel} {opts} {tests_list} {rand} -vvv' {img}".format(
-        net=net,
-        tty=tty,
-        bin=args.binary,
-        odbc_bridge_bin=args.odbc_bridge_binary,
-        library_bridge_bin=args.library_bridge_binary,
-        base_cfg=args.base_configs_dir,
-        cases_dir=args.cases_dir,
-        src_dir=args.src_dir,
-        env_tags=env_tags,
-        env_cleanup=env_cleanup,
-        parallel=parallel_args,
-        rand=rand_args,
-        opts=" ".join(args.pytest_args).replace("'", "\\'"),
-        tests_list=" ".join(args.tests_list),
-        dockerd_internal_volume=dockerd_internal_volume,
-        img=DIND_INTEGRATION_TESTS_IMAGE_NAME + ":" + args.docker_image_version,
-        name=CONTAINER_NAME,
-    )
+    pytest_opts = " ".join(args.pytest_args).replace("'", "\\'")
+    tests_list = " ".join(args.tests_list)
+    cmd_base = (
+        f"docker run {net} {tty} --rm --name {CONTAINER_NAME} "
+        "--privileged --dns-search='.' "  # since recent dns search leaks from host
+        f"--volume={args.odbc_bridge_binary}:/clickhouse-odbc-bridge "
+        f"--volume={args.binary}:/clickhouse "
+        f"--volume={args.library_bridge_binary}:/clickhouse-library-bridge "
+        f"--volume={args.base_configs_dir}:/clickhouse-config "
+        f"--volume={args.cases_dir}:/ClickHouse/tests/integration "
+        f"--volume={args.src_dir}/Server/grpc_protos:/ClickHouse/src/Server/grpc_protos "
+        f"--volume=/run:/run/host:ro {dockerd_internal_volume} {env_tags} {env_cleanup} "
+        "-e DOCKER_CLIENT_TIMEOUT=300 -e COMPOSE_HTTP_TIMEOUT=600 -e PYTHONUNBUFFERED=1 "
+        f"-e PYTEST_OPTS='{parallel_args} {pytest_opts} {tests_list} {rand_args} -vvv'"
+        f" {DIND_INTEGRATION_TESTS_IMAGE_NAME}:{args.docker_image_version}"
+    )
 
     cmd = cmd_base + " " + args.command
     cmd_pre_pull = (
-        cmd_base
-        + " find /compose -name docker_compose_*.yml -exec docker-compose -f '{}' pull \;"
+        f"{cmd_base} find /compose -name docker_compose_*.yml "
+        r"-exec docker-compose -f '{}' pull \;"
    )
 
     containers = subprocess.check_output(
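The rewritten cmd_pre_pull mixes an f-string with a raw string deliberately: literal braces inside an f-string would have to be doubled as {{}}, and the \; in a normal literal is an invalid escape sequence. A self-contained illustration (the cmd_base value here is a placeholder):

cmd_base = "docker run --rm runner-image"  # placeholder for illustration
cmd_pre_pull = (
    f"{cmd_base} find /compose -name docker_compose_*.yml "  # f-string part
    r"-exec docker-compose -f '{}' pull \;"  # raw string keeps {} and \; as-is
)
print(cmd_pre_pull)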
@@ -79,7 +79,7 @@ def run_query(instance, query, stdin=None, settings=None):
 
 
 def write_hudi_from_df(spark, table_name, df, result_path, mode="overwrite"):
-    if mode is "overwrite":
+    if mode == "overwrite":
         hudi_write_mode = "insert_overwrite"
     else:
         hudi_write_mode = "upsert"
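The one-character fix matters: 'is' compares object identity while '==' compares value, and equal strings are not guaranteed to be the same object (CPython even emits a SyntaxWarning for 'is' with a literal). A short demonstration of why the old comparison could silently misfire:

# Build an equal string at runtime so it is a fresh, non-interned object.
mode = "".join(["over", "write"])
print(mode == "overwrite")  # True  -- value comparison, the intended check
print(mode is "overwrite")  # False -- identity comparison; also a SyntaxWarning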