Revert some changes

Alexey Milovidov 2020-08-01 16:25:59 +03:00
parent 0c33498df1
commit 29a296b5da
21 changed files with 28 additions and 28 deletions
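The pattern repeated across the hunks below is the removal of quotes around ${CLICKHOUSE_CLIENT}, ${CLICKHOUSE_LOCAL} and ${CLICKHOUSE_CURL}. The sed lines in two of the hunks show that ${CLICKHOUSE_CLIENT} carries at least a --send_logs_level option alongside the binary, so the scripts appear to rely on word splitting when the variable is expanded; quoting the expansion would pass the whole string as a single argument. A minimal sketch of that difference, with an illustrative variable value rather than the harness's actual definition:

#!/usr/bin/env bash
# Hypothetical stand-in: binary plus an option kept in one string, as the tests seem to assume.
CLICKHOUSE_CLIENT="clickhouse-client --send_logs_level=warning"

# Unquoted expansion is word-split into "clickhouse-client" plus its option: the client runs.
${CLICKHOUSE_CLIENT} --query "SELECT 1"

# Quoted expansion is a single word: the shell looks for a program literally named
# "clickhouse-client --send_logs_level=warning" and fails with "command not found".
"${CLICKHOUSE_CLIENT}" --query "SELECT 1"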


@@ -18,9 +18,9 @@ ${CLICKHOUSE_CLIENT} --query="INSERT INTO test_optimize_exception_replicated VAL
 ${CLICKHOUSE_CLIENT} --optimize_throw_if_noop 1 --query="OPTIMIZE TABLE test_optimize_exception PARTITION 201709 FINAL"
 ${CLICKHOUSE_CLIENT} --optimize_throw_if_noop 1 --query="OPTIMIZE TABLE test_optimize_exception_replicated PARTITION 201709 FINAL"
-echo "$("${CLICKHOUSE_CLIENT}" --optimize_throw_if_noop 1 --server_logs_file=/dev/null --query="OPTIMIZE TABLE test_optimize_exception PARTITION 201710" 2>&1)" \
+echo "$(${CLICKHOUSE_CLIENT} --optimize_throw_if_noop 1 --server_logs_file=/dev/null --query="OPTIMIZE TABLE test_optimize_exception PARTITION 201710" 2>&1)" \
 | grep -c 'Code: 388. DB::Exception: .* DB::Exception: .* Cannot select parts for optimization'
-echo "$("${CLICKHOUSE_CLIENT}" --optimize_throw_if_noop 1 --server_logs_file=/dev/null --query="OPTIMIZE TABLE test_optimize_exception_replicated PARTITION 201710" 2>&1)" \
+echo "$(${CLICKHOUSE_CLIENT} --optimize_throw_if_noop 1 --server_logs_file=/dev/null --query="OPTIMIZE TABLE test_optimize_exception_replicated PARTITION 201710" 2>&1)" \
 | grep -c 'Code: 388. DB::Exception: .* DB::Exception:.* Cannot select parts for optimization'
 ${CLICKHOUSE_CLIENT} --query="DROP TABLE test_optimize_exception NO DELAY"


@@ -3,4 +3,4 @@
 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 . "$CURDIR"/../shell_config.sh
-env TZ=UTC "$CLICKHOUSE_CLIENT" --use_client_time_zone=1 --query="SELECT toDateTime(1000000000)"
+env TZ=UTC ${CLICKHOUSE_CLIENT} --use_client_time_zone=1 --query="SELECT toDateTime(1000000000)"


@@ -10,7 +10,7 @@ ${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS test_00575;"
 ${CLICKHOUSE_CLIENT} --query "CREATE TABLE test_00575 (dt Date DEFAULT now(), id UInt32, id2 UInt32 DEFAULT id + 1) ENGINE = MergeTree(dt, dt, 8192);"
 ${CLICKHOUSE_CLIENT} --query "INSERT INTO test_00575(dt,id) VALUES ('2018-02-22',3), ('2018-02-22',4), ('2018-02-22',5);"
 ${CLICKHOUSE_CLIENT} --query "SELECT * FROM test_00575 ORDER BY id;"
-echo "$("${CLICKHOUSE_CLIENT}" --query "ALTER TABLE test_00575 DROP COLUMN id;" --server_logs_file=/dev/null 2>&1 | grep -c "$exception_pattern")"
+echo "$(${CLICKHOUSE_CLIENT} --query "ALTER TABLE test_00575 DROP COLUMN id;" --server_logs_file=/dev/null 2>&1 | grep -c "$exception_pattern")"
 ${CLICKHOUSE_CLIENT} --query "ALTER TABLE test_00575 DROP COLUMN id2;"
 ${CLICKHOUSE_CLIENT} --query "SELECT * FROM test_00575 ORDER BY id;"
 ${CLICKHOUSE_CLIENT} --query "ALTER TABLE test_00575 DROP COLUMN id;"


@@ -15,7 +15,7 @@ ${CLICKHOUSE_CLIENT} --query "INSERT INTO test_truncate.test_view_depend VALUES(
 ${CLICKHOUSE_CLIENT} --query "SELECT * FROM test_truncate.test_view;"
 ${CLICKHOUSE_CLIENT} --query "SELECT '========Execute Truncate========';"
-echo "$("${CLICKHOUSE_CLIENT}" --query "TRUNCATE TABLE test_truncate.test_view;" --server_logs_file=/dev/null 2>&1 | grep -c "Code: 48.*Truncate is not supported by storage View")"
+echo "$(${CLICKHOUSE_CLIENT} --query "TRUNCATE TABLE test_truncate.test_view;" --server_logs_file=/dev/null 2>&1 | grep -c "Code: 48.*Truncate is not supported by storage View")"
 ${CLICKHOUSE_CLIENT} --query "SELECT '========After Truncate========';"
 ${CLICKHOUSE_CLIENT} --query "SELECT * FROM test_truncate.test_view;"


@@ -43,7 +43,7 @@ ${CLICKHOUSE_CLIENT} --query="INSERT INTO unsigned_integer_test_table VALUES (0)
 ${CLICKHOUSE_CLIENT} --query="INSERT INTO enum_test_table VALUES ('hello'), ('world'), ('world'), ('yandex'), ('clickhouse'), ('clickhouse');"
 ${CLICKHOUSE_CLIENT} --query="INSERT INTO date_test_table VALUES (1), (2), (2), (256), (257), (257);"
-CLICKHOUSE_CLIENT=$(echo "${CLICKHOUSE_CLIENT}" | sed 's/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/--send_logs_level=debug/g')
+CLICKHOUSE_CLIENT=$(echo ${CLICKHOUSE_CLIENT} | sed 's/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/--send_logs_level=debug/g')
 ${CLICKHOUSE_CLIENT} --query="SELECT count() FROM string_test_table WHERE toUInt64(val) == 0;" 2>&1 |grep -q "3 marks to read from 1 ranges" && echo "no monotonic int case: String -> UInt64"
 ${CLICKHOUSE_CLIENT} --query="SELECT count() FROM fixed_string_test_table WHERE toUInt64(val) == 0;" 2>&1 |grep -q "3 marks to read from 1 ranges" && echo "no monotonic int case: FixedString -> UInt64"
@@ -78,7 +78,7 @@ ${CLICKHOUSE_CLIENT} --query="SELECT count() FROM date_test_table WHERE toUInt16
 ${CLICKHOUSE_CLIENT} --query="SELECT count() FROM date_test_table WHERE toInt8(val) == 1;" 2>&1 |grep -q "5 marks to read from" && echo "monotonic int case: Date -> Int8"
 ${CLICKHOUSE_CLIENT} --query="SELECT count() FROM date_test_table WHERE toUInt8(val) == 1;" 2>&1 |grep -q "5 marks to read from" && echo "monotonic int case: Date -> UInt8"
-CLICKHOUSE_CLIENT=$(echo "${CLICKHOUSE_CLIENT}" | sed 's/--send_logs_level=debug/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/g')
+CLICKHOUSE_CLIENT=$(echo ${CLICKHOUSE_CLIENT} | sed 's/--send_logs_level=debug/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/g')
 ${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS string_test_table;"
 ${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS fixed_string_test_table;"


@@ -22,7 +22,7 @@ ${CLICKHOUSE_CLIENT} --query "SELECT TOP 2 * FROM test_00687 ORDER BY val;"
 ${CLICKHOUSE_CLIENT} --query "SELECT TOP (2) * FROM test_00687 ORDER BY val;"
 ${CLICKHOUSE_CLIENT} --query "SELECT * FROM test_00687 ORDER BY val LIMIT 2 OFFSET 2;"
-echo "$("${CLICKHOUSE_CLIENT}" --query "SELECT TOP 2 * FROM test_00687 ORDER BY val LIMIT 2;" 2>&1 | grep -c "Code: 406")"
-echo "$("${CLICKHOUSE_CLIENT}" --query "SELECT * FROM test_00687 ORDER BY val LIMIT 2,3 OFFSET 2;" 2>&1 | grep -c "Code: 62")"
+echo "$(${CLICKHOUSE_CLIENT} --query "SELECT TOP 2 * FROM test_00687 ORDER BY val LIMIT 2;" 2>&1 | grep -c "Code: 406")"
+echo "$(${CLICKHOUSE_CLIENT} --query "SELECT * FROM test_00687 ORDER BY val LIMIT 2,3 OFFSET 2;" 2>&1 | grep -c "Code: 62")"
 ${CLICKHOUSE_CLIENT} --query "DROP TABLE test_00687;"


@@ -34,7 +34,7 @@ $CLICKHOUSE_CLIENT -q "KILL QUERY WHERE query='$query_to_kill' ASYNC" &>/dev/nul
 sleep 1
 # Kill $query_for_pending SYNC. This query is not blocker, so it should be killed fast.
-timeout 20 "$CLICKHOUSE_CLIENT" -q "KILL QUERY WHERE query='$query_for_pending' SYNC" &>/dev/null
+timeout 20 ${CLICKHOUSE_CLIENT} -q "KILL QUERY WHERE query='$query_for_pending' SYNC" &>/dev/null
 # Both queries have to be killed, doesn't matter with SYNC or ASYNC kill
 for _ in {1..15}


@@ -3,7 +3,7 @@
 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 . "$CURDIR"/../shell_config.sh
-CLICKHOUSE_CLIENT=$(echo "${CLICKHOUSE_CLIENT}" | sed 's/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/--send_logs_level=none/g')
+CLICKHOUSE_CLIENT=$(echo ${CLICKHOUSE_CLIENT} | sed 's/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/--send_logs_level=none/g')
 $CLICKHOUSE_CLIENT --query="DROP TABLE IF EXISTS check;"


@@ -18,7 +18,7 @@ ${CLICKHOUSE_CLIENT} -n -q "
 INSERT INTO $R1 VALUES (1)
 "
-timeout 10s "${CLICKHOUSE_CLIENT}" -n -q "
+timeout 10s ${CLICKHOUSE_CLIENT} -n -q "
 SET receive_timeout=1;
 SYSTEM SYNC REPLICA $R2
 " 2>&1 | grep -F -q "Code: 159. DB::Exception" && echo 'OK' || echo 'Failed!'


@@ -3,7 +3,7 @@
 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 . "$CURDIR"/../shell_config.sh
-CLICKHOUSE_CLIENT=$(echo "${CLICKHOUSE_CLIENT}" | sed 's/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/--send_logs_level=none/g')
+CLICKHOUSE_CLIENT=$(echo ${CLICKHOUSE_CLIENT} | sed 's/'"--send_logs_level=${CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL}"'/--send_logs_level=none/g')
 $CLICKHOUSE_CLIENT --query="SELECT * FROM (SELECT number % 5 AS a, count() AS b, c FROM numbers(10) ARRAY JOIN [1,2] AS c GROUP BY a,c) AS table ORDER BY a LIMIT 3 WITH TIES BY a" 2>&1 | grep -q "Code: 498." && echo 'OK' || echo 'FAIL' ||:


@@ -7,5 +7,5 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 ${CLICKHOUSE_CLIENT} --query="SELECT avgWeighted(x, weight) FROM (SELECT t.1 AS x, t.2 AS weight FROM (SELECT arrayJoin([(1, 5), (2, 4), (3, 3), (4, 2), (5, 1)]) AS t));"
 ${CLICKHOUSE_CLIENT} --query="SELECT avgWeighted(x, weight) FROM (SELECT t.1 AS x, t.2 AS weight FROM (SELECT arrayJoin([(1, 0), (2, 0), (3, 0), (4, 0), (5, 0)]) AS t));"
-echo "$("${CLICKHOUSE_CLIENT}" --server_logs_file=/dev/null --query="SELECT avgWeighted(toDecimal64(0, 0), toFloat64(0))" 2>&1)" \
+echo "$(${CLICKHOUSE_CLIENT} --server_logs_file=/dev/null --query="SELECT avgWeighted(toDecimal64(0, 0), toFloat64(0))" 2>&1)" \
 | grep -c 'Code: 43. DB::Exception: .* DB::Exception:.* Different types .* of arguments for aggregate function avgWeighted'


@@ -29,7 +29,7 @@ $CLICKHOUSE_CLIENT --query "SELECT '12 -> ', dictGetInt64('dictdb.dict', 'y', to
 $CLICKHOUSE_CLIENT --query "INSERT INTO dictdb.table VALUES (13, 103, now())"
 $CLICKHOUSE_CLIENT --query "INSERT INTO dictdb.table VALUES (14, 104, now() - INTERVAL 1 DAY)"
-while [ "$("$CLICKHOUSE_CLIENT" --query "SELECT dictGetInt64('dictdb.dict', 'y', toUInt64(13))")" = -1 ]
+while [ "$(${CLICKHOUSE_CLIENT} --query "SELECT dictGetInt64('dictdb.dict', 'y', toUInt64(13))")" = -1 ]
 do
 sleep 0.5
 done


@@ -96,7 +96,7 @@ done
 echo "Equal number of columns"
 # This alter will finish all previous, but replica 1 maybe still not up-to-date
-while [[ $(timeout 120 "$CLICKHOUSE_CLIENT" --query "ALTER TABLE concurrent_alter_add_drop_1 MODIFY COLUMN value0 String SETTINGS replication_alter_partitions_sync=2" 2>&1) ]]; do
+while [[ $(timeout 120 ${CLICKHOUSE_CLIENT} --query "ALTER TABLE concurrent_alter_add_drop_1 MODIFY COLUMN value0 String SETTINGS replication_alter_partitions_sync=2" 2>&1) ]]; do
 sleep 1
 done


@@ -90,7 +90,7 @@ for i in $(seq $REPLICAS); do
 done
 # This alter will finish all previous, but replica 1 maybe still not up-to-date
-while [[ $(timeout 120 "$CLICKHOUSE_CLIENT" --query "ALTER TABLE concurrent_alter_detach_1 MODIFY COLUMN value1 String SETTINGS replication_alter_partitions_sync=2" 2>&1) ]]; do
+while [[ $(timeout 120 ${CLICKHOUSE_CLIENT} --query "ALTER TABLE concurrent_alter_detach_1 MODIFY COLUMN value1 String SETTINGS replication_alter_partitions_sync=2" 2>&1) ]]; do
 sleep 1
 # just try to attach table if it failed for some reason in the code above
 for i in $(seq $REPLICAS); do


@@ -107,7 +107,7 @@ echo "Finishing alters"
 #
 # 120 seconds is more than enough, but in rare cases for slow builds (debug,
 # thread) it maybe necessary.
-while [[ $(timeout 120 "$CLICKHOUSE_CLIENT" --query "ALTER TABLE concurrent_alter_mt_1 MODIFY COLUMN value1 String SETTINGS replication_alter_partitions_sync=2" 2>&1) ]]; do
+while [[ $(timeout 120 ${CLICKHOUSE_CLIENT} --query "ALTER TABLE concurrent_alter_mt_1 MODIFY COLUMN value1 String SETTINGS replication_alter_partitions_sync=2" 2>&1) ]]; do
 sleep 1
 done


@@ -10,4 +10,4 @@ opts=(
 )
 # 1.8 less then 2 seconds, but long enough to cover possible load peaks
 # "$@" left to pass manual options (like --experimental_use_processors 0) during manual testing
-timeout 1.8s "$CLICKHOUSE_CLIENT" "${opts[@]}" "$@"
+timeout 1.8s ${CLICKHOUSE_CLIENT} "${opts[@]}" "$@"


@@ -5,4 +5,4 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 query="SELECT sleepEachRow(1) FROM remote('127.{2,3}', system.one)"
 # 1.8 less then 2 seconds, but long enough to cover possible load peaks
-timeout 1.8s "${CLICKHOUSE_CURL}" -sS "${CLICKHOUSE_URL}&max_distributed_connections=2&max_threads=1" -d "$query"
+timeout 1.8s ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&max_distributed_connections=2&max_threads=1" -d "$query"


@@ -7,7 +7,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # Don't even try to do that. This test should be disabled for sanitizer builds.
 ${CLICKHOUSE_LOCAL} --query "SELECT max(value LIKE '%sanitize%') FROM system.build_options" | grep -q '1' && echo 'Skip test for sanitizer build' && exit
-command=$(command -v "${CLICKHOUSE_LOCAL}")
+command=$(command -v ${CLICKHOUSE_LOCAL})
 function run_with_cpu()
 {


@@ -3,7 +3,7 @@
 CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 . "$CURDIR"/../shell_config.sh
-PARSER=("${CLICKHOUSE_LOCAL}" --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format CSV)
+PARSER=(${CLICKHOUSE_LOCAL} --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format CSV)
 echo '2020-04-21 12:34:56, "Hello", 12345678' | "${PARSER[@]}" 2>&1| grep "ERROR" || echo "CSV"
 echo '2020-04-21 12:34:56, "Hello", 123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"
 echo '2020-04-21 12:34:567, "Hello", 123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"
@@ -12,7 +12,7 @@ echo '2020-04-21 12:34:56, "Hello", 12345678,1' | "${PARSER[@]}" 2>&1| grep "ERR
 echo '2020-04-21 12:34:56,,123Hello' | "${PARSER[@]}" 2>&1| grep "ERROR"
 echo -e '2020-04-21 12:34:56, "Hello", 12345678\n' | "${PARSER[@]}" 2>&1| grep "ERROR"
-PARSER=("${CLICKHOUSE_LOCAL}" --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format CustomSeparatedIgnoreSpaces --format_custom_escaping_rule CSV --format_custom_field_delimiter ',' --format_custom_row_after_delimiter "")
+PARSER=(${CLICKHOUSE_LOCAL} --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format CustomSeparatedIgnoreSpaces --format_custom_escaping_rule CSV --format_custom_field_delimiter ',' --format_custom_row_after_delimiter "")
 echo '2020-04-21 12:34:56, "Hello", 12345678' | "${PARSER[@]}" 2>&1| grep "ERROR" || echo -e "\nCustomSeparatedIgnoreSpaces"
 echo '2020-04-21 12:34:56, "Hello", 123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"
 echo '2020-04-21 12:34:567, "Hello", 123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"
@@ -20,7 +20,7 @@ echo '2020-04-21 12:34:56, "Hello", 12345678,1' | "${PARSER[@]}" 2>&1| grep "ERR
 echo '2020-04-21 12:34:56,,123Hello' | "${PARSER[@]}" 2>&1| grep "ERROR"
 echo -e '2020-04-21 12:34:56, "Hello", 12345678\n\n\n\n ' | "${PARSER[@]}" 2>&1| grep "ERROR" || echo "OK"
-PARSER=("${CLICKHOUSE_LOCAL}" --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format TSV)
+PARSER=(${CLICKHOUSE_LOCAL} --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format TSV)
 echo -e '2020-04-21 12:34:56\tHello\t12345678' | "${PARSER[@]}" 2>&1| grep "ERROR" || echo -e "\nTSV"
 echo -e '2020-04-21 12:34:56\tHello\t123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"
 echo -e '2020-04-21 12:34:567\tHello\t123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"
@@ -28,7 +28,7 @@ echo -e '2020-04-21 12:34:56\tHello\t12345678\t1' | "${PARSER[@]}" 2>&1| grep "E
 echo -e '2020-04-21 12:34:56\t\t123Hello' | "${PARSER[@]}" 2>&1| grep "ERROR"
 echo -e '2020-04-21 12:34:56\tHello\t12345678\n' | "${PARSER[@]}" 2>&1| grep "ERROR"
-PARSER=("${CLICKHOUSE_LOCAL}" --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format CustomSeparated)
+PARSER=(${CLICKHOUSE_LOCAL} --query 'SELECT t, s, d FROM table' --structure 't DateTime, s String, d Decimal64(10)' --input-format CustomSeparated)
 echo -e '2020-04-21 12:34:56\tHello\t12345678' | "${PARSER[@]}" 2>&1| grep "ERROR" || echo -e "\nCustomSeparated"
 echo -e '2020-04-21 12:34:56\tHello\t123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"
 echo -e '2020-04-21 12:34:567\tHello\t123456789' | "${PARSER[@]}" 2>&1| grep "ERROR"


@@ -13,7 +13,7 @@ ${CLICKHOUSE_CLIENT} --format Null -n <<<'SELECT sleepEachRow(1) FROM numbers(5)
 yes 'SELECT 1' 2>/dev/null | {
 head -n1000
 } | {
-xargs -i "${CLICKHOUSE_CURL}" -sS "${CLICKHOUSE_URL}&wait_end_of_query=1&max_memory_usage_for_user=$((1<<30))" -d '{}'
+xargs -i ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&wait_end_of_query=1&max_memory_usage_for_user=$((1<<30))" -d '{}'
 } | grep -x -c 1
 wait


@@ -10,7 +10,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # unless the my-program will try to output a thousand more lines overflowing pipe buffer and terminating with Broken Pipe.
 # But if my program just output 5 (or slightly more) lines and hang up, the pipeline is not terminated.
-timeout 1 "${CLICKHOUSE_LOCAL}" --max_execution_time 10 --query "SELECT DISTINCT number % 5 FROM system.numbers" ||:
+timeout 1 ${CLICKHOUSE_LOCAL} --max_execution_time 10 --query "SELECT DISTINCT number % 5 FROM system.numbers" ||:
 echo '---'
-timeout 1 "${CLICKHOUSE_CURL}" -sS --no-buffer "${CLICKHOUSE_URL}&max_execution_time=10" --data-binary "SELECT DISTINCT number % 5 FROM system.numbers" ||:
+timeout 1 ${CLICKHOUSE_CURL} -sS --no-buffer "${CLICKHOUSE_URL}&max_execution_time=10" --data-binary "SELECT DISTINCT number % 5 FROM system.numbers" ||:
 echo '---'