Merge pull request #35865 from azat/clickhouse-test-left-queries

clickhouse-test: fix left-queries-check, to fix test log parser
alesapin 2022-04-22 15:59:38 +02:00 committed by GitHub
commit 18d094d79d
9 changed files with 81 additions and 64 deletions

View File

@@ -92,6 +92,8 @@ function run_tests()
     if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
         ADDITIONAL_OPTIONS+=('--replicated-database')
+        # Cannot be used with replicated database, due to distributed_ddl_output_mode=none
+        ADDITIONAL_OPTIONS+=('--no-left-queries-check')
         ADDITIONAL_OPTIONS+=('--jobs')
         ADDITIONAL_OPTIONS+=('2')
     else
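
Editor's note: the ADDITIONAL_OPTIONS array is expanded into the clickhouse-test command line further down in run.sh, which is not part of this diff. A minimal sketch of that pattern, with everything except --no-left-queries-check and the array name treated as illustrative:

# Hypothetical: how run.sh typically consumes the array. Every accumulated
# flag, including --no-left-queries-check, is forwarded to clickhouse-test
# via array expansion.
clickhouse-test "${ADDITIONAL_OPTIONS[@]}" "$@" 2>&1 | tee test_output.txt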

View File

@@ -356,6 +356,7 @@ class FailureReason(enum.Enum):
     RESULT_DIFF = "result differs with reference: "
     TOO_LONG = "Test runs too long (> 60s). Make it faster."
     INTERNAL_QUERY_FAIL = "Internal query (CREATE/DROP DATABASE) failed:"
+    LEFT_QUERIES = "Queries left in background after the test finished:"

     # SKIPPED reasons
     DISABLED = "disabled"
@@ -671,6 +672,15 @@ class TestCase:
             # We're in Flaky Check mode, check the run time as well while we're at it.
             return TestResult(self.name, TestStatus.FAIL, FailureReason.TOO_LONG, total_time, description)

+        left_queries_check = args.no_left_queries_check is False
+        if self.tags and 'no-left-queries-check' in self.tags:
+            left_queries_check = False
+        if left_queries_check:
+            processlist = get_processlist_after_test(self.testcase_args)
+            if processlist:
+                description += "\n{}\n".format(json.dumps(processlist, indent=4))
+                return TestResult(self.name, TestStatus.FAIL, FailureReason.LEFT_QUERIES, total_time, description)
+
         if os.path.exists(self.stdout_file):
             os.remove(self.stdout_file)
         if os.path.exists(self.stderr_file):
@@ -747,16 +757,6 @@ class TestCase:
             proc.stdout is None or 'Exception' not in proc.stdout)
         need_drop_database = not maybe_passed

-        left_queries_check = args.no_left_queries_check is False
-        if self.tags and 'no-left-queries-check' in self.tags:
-            left_queries_check = False
-        if left_queries_check:
-            processlist = get_processlist_after_test(args)
-            if processlist:
-                print(colored(f"\nFound queries left in processlist after running {args.testcase_basename} (database={database}):", args, "red", attrs=["bold"]))
-                print(json.dumps(processlist, indent=4))
-                exit_code.value = 1
-
         if need_drop_database:
             seconds_left = max(args.timeout - (datetime.now() - start_time).total_seconds(), 20)
             try:
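
Editor's note: get_processlist_after_test is defined elsewhere in clickhouse-test and is not shown in this diff. Conceptually, the check asks the server which queries from the just-finished test are still running; roughly the following query, expressed in shell (the exact filter the helper uses is an assumption):

# Hypothetical approximation of the left-queries check: anything still in
# system.processes for the test's database after the test finished counts
# as a leftover query. The helper returns this as JSON, which clickhouse-test
# appends to the failure description.
$CLICKHOUSE_CLIENT -q "
    SELECT query_id, elapsed, query
    FROM system.processes
    WHERE current_database = '$CLICKHOUSE_DATABASE'
      AND query NOT LIKE '%system.processes%'
    FORMAT JSON"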

View File

@@ -15,3 +15,5 @@ while [[ $i -lt $retries ]]; do
     timeout 1.8s ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&max_distributed_connections=2&max_threads=1" -d "$query" && break
     ((++i))
 done
+
+clickhouse_test_wait_queries 60
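
Editor's note: clickhouse_test_wait_queries comes from shell_config.sh and its definition is not part of this diff. A plausible sketch of such a helper, assuming it polls system.processes for the current test database until it drains or the given number of seconds elapses:

# Hypothetical sketch of the helper: wait up to $1 seconds for all queries
# from the current test database to disappear from the processlist.
function clickhouse_test_wait_queries()
{
    local max_tries=${1:-60} i=0 count
    while [[ $i -lt $max_tries ]]; do
        # Exclude this polling query itself from the count.
        count=$($CLICKHOUSE_CLIENT -q "SELECT count() FROM system.processes WHERE current_database = currentDatabase() AND query NOT LIKE '%system.processes%'")
        [[ $count == 0 ]] && return
        sleep 1
        ((++i))
    done
}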

View File

@@ -10,46 +10,40 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 . "$CURDIR"/../shell_config.sh

-function thread_create {
-    while true; do
-        $CLICKHOUSE_CLIENT --query "CREATE TABLE IF NOT EXISTS $1 (x UInt64, s Array(Nullable(String))) ENGINE = $2"
-        sleep 0.0$RANDOM
-    done
+function thread_create()
+{
+    $CLICKHOUSE_CLIENT --query "CREATE TABLE IF NOT EXISTS $1 (x UInt64, s Array(Nullable(String))) ENGINE = $2"
+    sleep 0.0$RANDOM
 }

-function thread_drop {
-    while true; do
-        $CLICKHOUSE_CLIENT --query "DROP TABLE IF EXISTS $1"
-        sleep 0.0$RANDOM
-    done
+function thread_drop()
+{
+    $CLICKHOUSE_CLIENT --query "DROP TABLE IF EXISTS $1"
+    sleep 0.0$RANDOM
 }

-function thread_rename {
-    while true; do
-        $CLICKHOUSE_CLIENT --query "RENAME TABLE $1 TO $2" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: ' | grep -v -P 'Code: (60|57)'
-        sleep 0.0$RANDOM
-    done
+function thread_rename()
+{
+    $CLICKHOUSE_CLIENT --query "RENAME TABLE $1 TO $2" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: ' | grep -v -P 'Code: (60|57)'
+    sleep 0.0$RANDOM
 }

-function thread_select {
-    while true; do
-        $CLICKHOUSE_CLIENT --query "SELECT * FROM $1 FORMAT Null" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: ' | grep -v -P 'Code: (60|218)'
-        sleep 0.0$RANDOM
-    done
+function thread_select()
+{
+    $CLICKHOUSE_CLIENT --query "SELECT * FROM $1 FORMAT Null" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: ' | grep -v -P 'Code: (60|218)'
+    sleep 0.0$RANDOM
 }

-function thread_insert {
-    while true; do
-        $CLICKHOUSE_CLIENT --query "INSERT INTO $1 SELECT rand64(1), [toString(rand64(2))] FROM numbers($2)" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: '| grep -v -P 'Code: (60|218)'
-        sleep 0.0$RANDOM
-    done
+function thread_insert()
+{
+    $CLICKHOUSE_CLIENT --query "INSERT INTO $1 SELECT rand64(1), [toString(rand64(2))] FROM numbers($2)" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: '| grep -v -P 'Code: (60|218)'
+    sleep 0.0$RANDOM
 }

-function thread_insert_select {
-    while true; do
-        $CLICKHOUSE_CLIENT --query "INSERT INTO $1 SELECT * FROM $2" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: ' | grep -v -P 'Code: (60|218)'
-        sleep 0.0$RANDOM
-    done
+function thread_insert_select()
+{
+    $CLICKHOUSE_CLIENT --query "INSERT INTO $1 SELECT * FROM $2" 2>&1 | grep -v -e 'Received exception from server' -e '^(query: ' | grep -v -P 'Code: (60|218)'
+    sleep 0.0$RANDOM
 }

 export -f thread_create
@@ -65,18 +59,18 @@ export -f thread_insert_select
 function test_with_engine {
     echo "Testing $1"

-    timeout 10 bash -c "thread_create t1 $1" &
-    timeout 10 bash -c "thread_create t2 $1" &
-    timeout 10 bash -c 'thread_drop t1' &
-    timeout 10 bash -c 'thread_drop t2' &
-    timeout 10 bash -c 'thread_rename t1 t2' &
-    timeout 10 bash -c 'thread_rename t2 t1' &
-    timeout 10 bash -c 'thread_select t1' &
-    timeout 10 bash -c 'thread_select t2' &
-    timeout 10 bash -c 'thread_insert t1 5' &
-    timeout 10 bash -c 'thread_insert t2 10' &
-    timeout 10 bash -c 'thread_insert_select t1 t2' &
-    timeout 10 bash -c 'thread_insert_select t2 t1' &
+    clickhouse_client_loop_timeout 10 thread_create t1 $1 &
+    clickhouse_client_loop_timeout 10 thread_create t2 $1 &
+    clickhouse_client_loop_timeout 10 thread_drop t1 &
+    clickhouse_client_loop_timeout 10 thread_drop t2 &
+    clickhouse_client_loop_timeout 10 thread_rename t1 t2 &
+    clickhouse_client_loop_timeout 10 thread_rename t2 t1 &
+    clickhouse_client_loop_timeout 10 thread_select t1 &
+    clickhouse_client_loop_timeout 10 thread_select t2 &
+    clickhouse_client_loop_timeout 10 thread_insert t1 5 &
+    clickhouse_client_loop_timeout 10 thread_insert t2 10 &
+    clickhouse_client_loop_timeout 10 thread_insert_select t1 t2 &
+    clickhouse_client_loop_timeout 10 thread_insert_select t2 t1 &

     wait
     echo "Done $1"

View File

@@ -17,3 +17,9 @@ while [[ $counter -lt $retries ]]; do
 done
 echo 'Ok'
+
+# Wait for the queries to finish. The 'Maximum parse depth' error happens on
+# the client side, and in that case the client simply resets the connection
+# without reading everything from the server, so there is no guarantee that
+# the query has been stopped by the time the client returns.
+clickhouse_test_wait_queries 60

View File

@@ -17,3 +17,9 @@ done
 #echo "I = ${I}"
 echo 'Ok'
+
+# Wait for the queries to finish. The 'Maximum parse depth' error happens on
+# the client side, and in that case the client simply resets the connection
+# without reading everything from the server, so there is no guarantee that
+# the query has been stopped by the time the client returns.
+clickhouse_test_wait_queries 60

View File

@@ -8,3 +8,5 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # cancellation with async_socket_for_remote=1 (that ignores
 # max_distributed_connections)
 timeout --signal=SIGINT 1 ${CLICKHOUSE_CLIENT} --max_distributed_connections=1 --max_block_size=2 --interactive_delay=900000 -q "select number + sleep(0.3) as x from remote('127.{2,3}', system.numbers) settings max_block_size = 2" 2>&1 | grep "Empty task was returned from async task queue" || true
+
+clickhouse_test_wait_queries 60

View File

@@ -5,21 +5,19 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # shellcheck source=../shell_config.sh
 . "$CURDIR"/../shell_config.sh

-$CLICKHOUSE_CLIENT -q 'CREATE USER IF NOT EXISTS u1 IDENTIFIED WITH no_password'
-$CLICKHOUSE_CLIENT -q 'GRANT ALL ON *.* TO u1'
+$CLICKHOUSE_CLIENT -q 'DROP USER IF EXISTS u02104'
+$CLICKHOUSE_CLIENT -q 'CREATE USER IF NOT EXISTS u02104 IDENTIFIED WITH no_password'
+$CLICKHOUSE_CLIENT -q 'GRANT ALL ON *.* TO u02104'

 function overcommited()
 {
-    while true; do
-        $CLICKHOUSE_CLIENT -u u1 -q 'SELECT number FROM numbers(130000) GROUP BY number SETTINGS max_guaranteed_memory_usage=1,memory_usage_overcommit_max_wait_microseconds=500' 2>&1 | grep -F -q "MEMORY_LIMIT_EXCEEDED" && echo "OVERCOMMITED WITH USER LIMIT IS KILLED"
-    done
+    $CLICKHOUSE_CLIENT -u u02104 -q 'SELECT number FROM numbers(130000) GROUP BY number SETTINGS max_guaranteed_memory_usage=1,memory_usage_overcommit_max_wait_microseconds=500' 2>&1 \
+        | grep -F -q "MEMORY_LIMIT_EXCEEDED" && echo "OVERCOMMITED WITH USER LIMIT IS KILLED"
 }

 function expect_execution()
 {
-    while true; do
-        $CLICKHOUSE_CLIENT -u u1 -q 'SELECT number FROM numbers(130000) GROUP BY number SETTINGS max_memory_usage_for_user=5000000,max_guaranteed_memory_usage=2,memory_usage_overcommit_max_wait_microseconds=500' >/dev/null 2>/dev/null
-    done
+    $CLICKHOUSE_CLIENT -u u02104 -q 'SELECT number FROM numbers(130000) GROUP BY number SETTINGS max_memory_usage_for_user=5000000,max_guaranteed_memory_usage=2,memory_usage_overcommit_max_wait_microseconds=500' >/dev/null 2>/dev/null
 }

 export -f overcommited
@@ -29,9 +27,9 @@ function user_test()
 {
     for _ in {1..10};
     do
-        timeout 10 bash -c overcommited &
-        timeout 10 bash -c expect_execution &
-    done;
+        clickhouse_client_loop_timeout 10 overcommited &
+        clickhouse_client_loop_timeout 10 expect_execution &
+    done

     wait
 }
@@ -45,4 +43,4 @@ else
     echo "OVERCOMMITED WITH USER LIMIT WAS KILLED"
 fi

-$CLICKHOUSE_CLIENT -q 'DROP USER IF EXISTS u1'
+$CLICKHOUSE_CLIENT -q 'DROP USER IF EXISTS u02104'

View File

@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
+# Tags: long

 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # shellcheck source=../shell_config.sh
@@ -25,3 +26,9 @@ timeout 0.15s ${CLICKHOUSE_CURL} -sS -F "s=@$tmp_file;" "${CLICKHOUSE_URL}&s_str
 echo $?
 timeout 0.15s ${CLICKHOUSE_CURL} -sS -F "s=@$tmp_file;" "${CLICKHOUSE_URL}&s_structure=key+Int&query=SELECT+dummy+IN+s&input_format_parallel_parsing=false" -o /dev/null
 echo $?
+
+# Wait until the queries above have actually started, so that
+# clickhouse_test_wait_queries will see them in the processlist.
+sleep 5
+clickhouse_test_wait_queries 60