#!/bin/bash

set -e -x

# Choose a random timezone for this test run
TZ="$(grep -v '#' /usr/share/zoneinfo/zone.tab | awk '{print $3}' | shuf | head -n1)"
echo "Chosen random timezone $TZ"
ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime && echo "$TZ" > /etc/timezone
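
# Install the ClickHouse packages from package_folder (server, client, debug symbols and clickhouse-test)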
dpkg -i package_folder/clickhouse-common-static_*.deb
dpkg -i package_folder/clickhouse-common-static-dbg_*.deb
dpkg -i package_folder/clickhouse-server_*.deb
dpkg -i package_folder/clickhouse-client_*.deb
dpkg -i package_folder/clickhouse-test_*.deb

# install test configs
/usr/share/clickhouse-test/config/install.sh
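
# start() launches two extra server instances for the Replicated database check
# (when USE_DATABASE_REPLICATED=1), then starts the main server via
# "service clickhouse-server start" and waits until it answers "SELECT 1".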
function start()
{
    if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
        # NOTE We run "clickhouse server" instead of "clickhouse-server"
        # to make "pidof clickhouse-server" return a single pid of the main instance.
        # We will run the main instance using "service clickhouse-server start"
        sudo -E -u clickhouse /usr/bin/clickhouse server --config /etc/clickhouse-server1/config.xml --daemon \
            -- --path /var/lib/clickhouse1/ --logger.stderr /var/log/clickhouse-server/stderr1.log \
            --logger.log /var/log/clickhouse-server/clickhouse-server1.log --logger.errorlog /var/log/clickhouse-server/clickhouse-server1.err.log \
            --tcp_port 19000 --tcp_port_secure 19440 --http_port 18123 --https_port 18443 --interserver_http_port 19009 --tcp_with_proxy_port 19010 \
            --mysql_port 19004 --postgresql_port 19005 \
            --keeper_server.tcp_port 19181 --keeper_server.server_id 2

        sudo -E -u clickhouse /usr/bin/clickhouse server --config /etc/clickhouse-server2/config.xml --daemon \
            -- --path /var/lib/clickhouse2/ --logger.stderr /var/log/clickhouse-server/stderr2.log \
            --logger.log /var/log/clickhouse-server/clickhouse-server2.log --logger.errorlog /var/log/clickhouse-server/clickhouse-server2.err.log \
            --tcp_port 29000 --tcp_port_secure 29440 --http_port 28123 --https_port 28443 --interserver_http_port 29009 --tcp_with_proxy_port 29010 \
            --mysql_port 29004 --postgresql_port 29005 \
            --keeper_server.tcp_port 29181 --keeper_server.server_id 3
    fi

    counter=0
    until clickhouse-client --query "SELECT 1"
    do
        if [ "$counter" -gt 120 ]
        then
            echo "Cannot start clickhouse-server"
            cat /var/log/clickhouse-server/stdout.log
            tail -n1000 /var/log/clickhouse-server/stderr.log
            tail -n1000 /var/log/clickhouse-server/clickhouse-server.log
            break
        fi
        timeout 120 service clickhouse-server start
        sleep 0.5
        counter=$((counter + 1))
    done
}
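
# Bring the server(s) up and download the stateful datasets from S3.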
start

# shellcheck disable=SC2086 # No quotes because I want to split it into words.
/s3downloader --url-prefix "$S3_URL" --dataset-names $DATASETS
chmod 777 -R /var/lib/clickhouse
clickhouse-client --query "SHOW DATABASES"
clickhouse-client --query "ATTACH DATABASE datasets ENGINE = Ordinary"
service clickhouse-server restart

# Wait for server to start accepting connections
for _ in {1..120}; do
    clickhouse-client --query "SELECT 1" && break
    sleep 1
done

clickhouse-client --query "SHOW TABLES FROM datasets"
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
    clickhouse-client --query "CREATE DATABASE test ON CLUSTER 'test_cluster_database_replicated'
        ENGINE=Replicated('/test/clickhouse/db/test', '{shard}', '{replica}')"

    clickhouse-client --query "CREATE TABLE test.hits AS datasets.hits_v1"
    clickhouse-client --query "CREATE TABLE test.visits AS datasets.visits_v1"

    clickhouse-client --query "INSERT INTO test.hits SELECT * FROM datasets.hits_v1"
    clickhouse-client --query "INSERT INTO test.visits SELECT * FROM datasets.visits_v1"

    clickhouse-client --query "DROP TABLE datasets.hits_v1"
    clickhouse-client --query "DROP TABLE datasets.visits_v1"

    MAX_RUN_TIME=$((MAX_RUN_TIME < 9000 ? MAX_RUN_TIME : 9000)) # min(MAX_RUN_TIME, 2.5 hours)
    MAX_RUN_TIME=$((MAX_RUN_TIME != 0 ? MAX_RUN_TIME : 9000))   # set to 2.5 hours if 0 (unlimited)
else
    clickhouse-client --query "CREATE DATABASE test"
    clickhouse-client --query "SHOW TABLES FROM test"
    clickhouse-client --query "RENAME TABLE datasets.hits_v1 TO test.hits"
    clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
fi
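
# Print the test tables and their row counts as a quick sanity check.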
clickhouse-client --query "SHOW TABLES FROM test"
clickhouse-client --query "SELECT count() FROM test.hits"
clickhouse-client --query "SELECT count() FROM test.visits"
function run_tests()
{
    set -x
    # We can have several additional options, so we pass them as an array because it's
    # more ideologically correct.
    read -ra ADDITIONAL_OPTIONS <<< "${ADDITIONAL_OPTIONS:-}"

    if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
        ADDITIONAL_OPTIONS+=('--replicated-database')
    fi

    set +e
    clickhouse-test --testname --shard --zookeeper --check-zookeeper-session --no-stateless --hung-check --print-time "${ADDITIONAL_OPTIONS[@]}" \
        "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
    set -e
}
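
# Export the function so it can run in a subshell under `timeout`, capping the whole suite at MAX_RUN_TIME seconds.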
export -f run_tests
timeout "$MAX_RUN_TIME" bash -c run_tests ||:
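
# Post-process the test results (falling back to a "Cannot parse results" status on failure),
# grep for fatal errors and save the compressed server log as a CI artifact.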
./process_functional_tests_result.py || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv

grep -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:

pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz ||:
# FIXME: remove this once only GitHub Actions is left
rm /var/log/clickhouse-server/clickhouse-server.log
mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
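
# Optionally archive coverage data and the logs of the two replicated-database instances.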
if [[ -n "$WITH_COVERAGE" ]] && [[ "$WITH_COVERAGE" -eq 1 ]]; then
    tar -chf /test_output/clickhouse_coverage.tar.gz /profraw ||:
fi
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
    grep -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server1.log ||:
    grep -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server2.log ||:
    pigz < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.gz ||:
    pigz < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.gz ||:
    # FIXME: remove this once only GitHub Actions is left
    rm /var/log/clickhouse-server/clickhouse-server1.log
    rm /var/log/clickhouse-server/clickhouse-server2.log
    mv /var/log/clickhouse-server/stderr1.log /test_output/ ||:
    mv /var/log/clickhouse-server/stderr2.log /test_output/ ||:
fi