Remove obsolete --multiquery parameter from tests

Robert Schulze 2024-07-29 20:06:55 +00:00
parent d9c621112b
commit d4d3d590e3
161 changed files with 316 additions and 322 deletions
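
For context, a minimal sketch of the pattern being changed (the table name and statements below are illustrative, not taken from the diff): clickhouse-client and clickhouse-local now accept multi-statement --query strings and multi-statement scripts on stdin by default, so the explicit --multiquery flag is obsolete and is simply dropped throughout the tests.

# Before: tests passed the flag explicitly
clickhouse-client --multiquery --query "DROP TABLE IF EXISTS t; CREATE TABLE t (x UInt64) ENGINE = Memory; SELECT count() FROM t;"

# After: the same statements run unchanged without it
clickhouse-client --query "DROP TABLE IF EXISTS t; CREATE TABLE t (x UInt64) ENGINE = Memory; SELECT count() FROM t;"

# The same applies to heredoc-style scripts fed to clickhouse-local
clickhouse-local <<'EOF'
SELECT 1;
SELECT 2;
EOF

Test behaviour is unchanged; only the redundant flag is removed from the invocations below.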

View File

@ -55,7 +55,7 @@ def test_single_file(started_cluster, cluster):
path = get_dist_path(cluster, "distr_1", 1)
query = f"select * from file('{path}/1.bin', 'Distributed')"
out = node.exec_in_container(
["/usr/bin/clickhouse", "local", "--multiquery", "--stacktrace", "-q", query]
["/usr/bin/clickhouse", "local", "--stacktrace", "-q", query]
)
assert out == "1\ta\n2\tbb\n3\tccc\n"
@ -65,7 +65,7 @@ def test_single_file(started_cluster, cluster):
select * from t;
"""
out = node.exec_in_container(
["/usr/bin/clickhouse", "local", "--multiquery", "--stacktrace", "-q", query]
["/usr/bin/clickhouse", "local", "--stacktrace", "-q", query]
)
assert out == "1\ta\n2\tbb\n3\tccc\n"
@ -106,7 +106,7 @@ def test_two_files(started_cluster, cluster):
select * from t order by x;
"""
out = node.exec_in_container(
["/usr/bin/clickhouse", "local", "--multiquery", "--stacktrace", "-q", query]
["/usr/bin/clickhouse", "local", "--stacktrace", "-q", query]
)
assert out == "0\t_\n1\ta\n2\tbb\n3\tccc\n"
@ -141,7 +141,7 @@ def test_single_file_old(started_cluster, cluster):
select * from t;
"""
out = node.exec_in_container(
["/usr/bin/clickhouse", "local", "--multiquery", "--stacktrace", "-q", query]
["/usr/bin/clickhouse", "local", "--stacktrace", "-q", query]
)
assert out == "1\ta\n2\tbb\n3\tccc\n"

View File

@ -5,4 +5,4 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --multiquery --query="SELECT 1; SELECT xyz; SELECT 2;" 2> /dev/null || true;
$CLICKHOUSE_CLIENT --query="SELECT 1; SELECT xyz; SELECT 2;" 2> /dev/null || true;

View File

@ -25,7 +25,7 @@ ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&session_id=${CLICKHOUSE_DATABASE}_4&se
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&session_id=${CLICKHOUSE_DATABASE}_5&session_timeout=60" --data-binary "SELECT 1"
echo "Sessions are local per user:"
${CLICKHOUSE_CLIENT} --multiquery --query "DROP USER IF EXISTS test_00463; CREATE USER test_00463; GRANT ALL ON *.* TO test_00463;"
${CLICKHOUSE_CLIENT} --query "DROP USER IF EXISTS test_00463; CREATE USER test_00463; GRANT ALL ON *.* TO test_00463;"
${CLICKHOUSE_CURL} -sS -X POST "${CLICKHOUSE_URL}&session_id=${CLICKHOUSE_DATABASE}_6&session_timeout=600" --data-binary "CREATE TEMPORARY TABLE t (s String)"
${CLICKHOUSE_CURL} -sS -X POST "${CLICKHOUSE_URL}&session_id=${CLICKHOUSE_DATABASE}_6" --data-binary "INSERT INTO t VALUES ('Hello')"
@ -37,7 +37,7 @@ ${CLICKHOUSE_CURL} -sS -X POST "${CLICKHOUSE_URL}&user=test_00463&session_id=${C
${CLICKHOUSE_CURL} -sS -X POST "${CLICKHOUSE_URL}&session_id=${CLICKHOUSE_DATABASE}_6" --data-binary "SELECT * FROM t"
${CLICKHOUSE_CURL} -sS -X POST "${CLICKHOUSE_URL}&user=test_00463&session_id=${CLICKHOUSE_DATABASE}_6" --data-binary "SELECT * FROM t"
${CLICKHOUSE_CLIENT} --multiquery --query "DROP USER test_00463";
${CLICKHOUSE_CLIENT} --query "DROP USER test_00463";
echo "And cannot be accessed for a non-existent user:"
${CLICKHOUSE_CURL} -sS -X POST "${CLICKHOUSE_URL}&user=test_00463&session_id=${CLICKHOUSE_DATABASE}_6" --data-binary "SELECT * FROM t" | grep -c -F 'Exception'
@ -59,7 +59,7 @@ done
echo "A session successfully expire after a timeout and the session's temporary table shadows the permanent table:"
# An infinite loop is required to make the test reliable. We will check that the timeout corresponds to the observed time at least once
${CLICKHOUSE_CLIENT} --multiquery --query "DROP TABLE IF EXISTS t; CREATE TABLE t (s String) ENGINE = Memory; INSERT INTO t VALUES ('World');"
${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS t; CREATE TABLE t (s String) ENGINE = Memory; INSERT INTO t VALUES ('World');"
while true
do
(
@ -70,7 +70,7 @@ do
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&session_id=${CLICKHOUSE_DATABASE}_8" --data-binary "SELECT * FROM t"
) | tr -d '\n' | grep -F 'HelloWorld' && break || sleep 1
done
${CLICKHOUSE_CLIENT} --multiquery --query "DROP TABLE t"
${CLICKHOUSE_CLIENT} --query "DROP TABLE t"
echo "A session cannot be used by concurrent connections:"
@ -83,5 +83,5 @@ do
done
${CLICKHOUSE_CURL} -sS -X POST "${CLICKHOUSE_URL}&session_id=${CLICKHOUSE_DATABASE}_9" --data-binary "SELECT 1" | grep -c -F 'SESSION_IS_LOCKED'
${CLICKHOUSE_CLIENT} --multiquery --query "KILL QUERY WHERE query_id = '${CLICKHOUSE_DATABASE}_9' SYNC FORMAT Null";
${CLICKHOUSE_CLIENT} --query "KILL QUERY WHERE query_id = '${CLICKHOUSE_DATABASE}_9' SYNC FORMAT Null";
wait

View File

@ -8,8 +8,8 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
$CLICKHOUSE_CLIENT --query="select toUInt64(pow(2, 62)) as value format JSON" --output_format_json_quote_64bit_integers=0 | grep value
$CLICKHOUSE_CLIENT --query="select toUInt64(pow(2, 62)) as value format JSON" --output_format_json_quote_64bit_integers=1 | grep value
$CLICKHOUSE_CLIENT --readonly=1 --multiquery --query="set output_format_json_quote_64bit_integers=1 ; select toUInt64(pow(2, 63)) as value format JSON" --server_logs_file=/dev/null 2>&1 | grep -o -q 'value\|Cannot modify .* setting in readonly mode' && echo "OK" || echo "FAIL"
$CLICKHOUSE_CLIENT --readonly=1 --multiquery --query="set output_format_json_quote_64bit_integers=0 ; select toUInt64(pow(2, 63)) as value format JSON" --server_logs_file=/dev/null 2>&1 | grep -o -q 'value\|Cannot modify .* setting in readonly mode' && echo "OK" || echo "FAIL"
$CLICKHOUSE_CLIENT --readonly=1 --query="set output_format_json_quote_64bit_integers=1 ; select toUInt64(pow(2, 63)) as value format JSON" --server_logs_file=/dev/null 2>&1 | grep -o -q 'value\|Cannot modify .* setting in readonly mode' && echo "OK" || echo "FAIL"
$CLICKHOUSE_CLIENT --readonly=1 --query="set output_format_json_quote_64bit_integers=0 ; select toUInt64(pow(2, 63)) as value format JSON" --server_logs_file=/dev/null 2>&1 | grep -o -q 'value\|Cannot modify .* setting in readonly mode' && echo "OK" || echo "FAIL"
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&query=SELECT+toUInt64(pow(2,+63))+as+value+format+JSON&output_format_json_quote_64bit_integers=1" | grep value
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&query=SELECT+toUInt64(pow(2,+63))+as+value+format+JSON&output_format_json_quote_64bit_integers=0" | grep value

View File

@ -7,7 +7,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS pk_in_tuple_perf;
CREATE TABLE pk_in_tuple_perf
(
@ -27,7 +27,7 @@ $CLICKHOUSE_CLIENT --query "$query FORMAT JSON" | grep "rows_read"
## Test with non-const args in tuple
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS pk_in_tuple_perf_non_const;
CREATE TABLE pk_in_tuple_perf_non_const
(

View File

@ -22,7 +22,7 @@ echo '"Hello, world"; 123; "2016-01-01"
"Hello, ""world"""; "456"; 2016-01-02;
Hello "world"; 789 ;2016-01-03
"Hello
world"; 100; 2016-01-04;' | $CLICKHOUSE_CLIENT --multiquery --query="SET format_csv_delimiter=';'; INSERT INTO csv FORMAT CSV";
world"; 100; 2016-01-04;' | $CLICKHOUSE_CLIENT --query="SET format_csv_delimiter=';'; INSERT INTO csv FORMAT CSV";
$CLICKHOUSE_CLIENT --query="SELECT * FROM csv ORDER BY d";
$CLICKHOUSE_CLIENT --format_csv_delimiter=";" --query="SELECT * FROM csv ORDER BY d FORMAT CSV";
@ -33,7 +33,7 @@ $CLICKHOUSE_CLIENT --query="CREATE TABLE csv (s1 String, s2 String) ENGINE = Mem
echo 'abc,def;hello;
hello; world;
"hello ""world""";abc,def;' | $CLICKHOUSE_CLIENT --multiquery --query="SET format_csv_delimiter=';'; INSERT INTO csv FORMAT CSV";
"hello ""world""";abc,def;' | $CLICKHOUSE_CLIENT --query="SET format_csv_delimiter=';'; INSERT INTO csv FORMAT CSV";
$CLICKHOUSE_CLIENT --query="SELECT * FROM csv";
@ -44,7 +44,7 @@ $CLICKHOUSE_CLIENT --query="CREATE TABLE csv (s1 String, s2 String) ENGINE = Mem
echo '"s1";"s2"
abc,def;hello;
hello; world;
"hello ""world""";abc,def;' | $CLICKHOUSE_CLIENT --multiquery --query="SET format_csv_delimiter=';'; INSERT INTO csv FORMAT CSVWithNames";
"hello ""world""";abc,def;' | $CLICKHOUSE_CLIENT --query="SET format_csv_delimiter=';'; INSERT INTO csv FORMAT CSVWithNames";
$CLICKHOUSE_CLIENT --format_csv_delimiter=";" --query="SELECT * FROM csv FORMAT CSV";
$CLICKHOUSE_CLIENT --format_csv_delimiter="," --query="SELECT * FROM csv FORMAT CSV";

View File

@ -18,7 +18,7 @@ $CLICKHOUSE_CLIENT --query="DROP TABLE csv";
$CLICKHOUSE_CLIENT --query="CREATE TABLE csv (s String, n UInt64, d Date) ENGINE = Memory";
echo "'single quote' not end, 123, 2016-01-01
'em good, 456, 2016-01-02" | $CLICKHOUSE_CLIENT --multiquery --query="SET format_csv_allow_single_quotes=0; INSERT INTO csv FORMAT CSV";
'em good, 456, 2016-01-02" | $CLICKHOUSE_CLIENT --query="SET format_csv_allow_single_quotes=0; INSERT INTO csv FORMAT CSV";
$CLICKHOUSE_CLIENT --query="SELECT * FROM csv ORDER BY d";
@ -38,7 +38,7 @@ $CLICKHOUSE_CLIENT --query="DROP TABLE csv";
$CLICKHOUSE_CLIENT --query="CREATE TABLE csv (s String, n UInt64, d Date) ENGINE = Memory";
echo '"double quote" not end, 123, 2016-01-01
"em good, 456, 2016-01-02' | $CLICKHOUSE_CLIENT --multiquery --query="SET format_csv_allow_double_quotes=0; INSERT INTO csv FORMAT CSV";
"em good, 456, 2016-01-02' | $CLICKHOUSE_CLIENT --query="SET format_csv_allow_double_quotes=0; INSERT INTO csv FORMAT CSV";
$CLICKHOUSE_CLIENT --query="SELECT * FROM csv ORDER BY d";

View File

@ -5,4 +5,4 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --ignore-error --multiquery --query "DROP TABLE IF EXISTS tab_00651; CREATE TABLE tab_00651 (val UInt64) engine = Memory; SHOW CREATE TABLE tab_00651 format abcd; DESC tab_00651; DROP TABLE tab_00651;" 2>/dev/null ||:
${CLICKHOUSE_CLIENT} --ignore-error --query "DROP TABLE IF EXISTS tab_00651; CREATE TABLE tab_00651 (val UInt64) engine = Memory; SHOW CREATE TABLE tab_00651 format abcd; DESC tab_00651; DROP TABLE tab_00651;" 2>/dev/null ||:

View File

@ -4,7 +4,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --mutations_sync=1 << EOF
${CLICKHOUSE_CLIENT} --mutations_sync=1 << EOF
DROP TABLE IF EXISTS mutations;
DROP TABLE IF EXISTS for_subquery;

View File

@ -9,7 +9,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=./mergetree_mutations.lib
. "$CURDIR"/mergetree_mutations.lib
${CLICKHOUSE_CLIENT} --allow_nondeterministic_mutations=1 --multiquery << EOF
${CLICKHOUSE_CLIENT} --allow_nondeterministic_mutations=1 << EOF
DROP TABLE IF EXISTS mutations_r1;
DROP TABLE IF EXISTS for_subquery;

View File

@ -7,7 +7,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
DROP TABLE IF EXISTS view_00699;
DROP TABLE IF EXISTS null_00699;
@ -20,14 +20,14 @@ SELECT count(), min(x), max(x) FROM view_00699;
ALTER TABLE null_00699 DELETE WHERE x % 2 = 0;" --mutations_sync=1
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
SELECT count(), min(x), max(x) FROM null_00699;
SELECT count(), min(x), max(x) FROM view_00699;
ALTER TABLE view_00699 DELETE WHERE x % 2 = 0;
" --mutations_sync=1
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
SELECT count(), min(x), max(x) FROM null_00699;
SELECT count(), min(x), max(x) FROM view_00699;
@ -35,7 +35,7 @@ ALTER TABLE null_00699 DELETE WHERE x % 2 = 1;
ALTER TABLE view_00699 DELETE WHERE x % 2 = 1;
" --mutations_sync=1
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
SELECT count(), min(x), max(x) FROM null_00699;
SELECT count(), min(x), max(x) FROM view_00699;

View File

@ -7,7 +7,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
DROP TABLE IF EXISTS memory;
CREATE TABLE memory (x UInt64) ENGINE = Memory;
@ -21,13 +21,13 @@ INSERT INTO memory SELECT * FROM numbers(1000);"
# But if the table will be dropped before query - just pass.
# It's Ok, because otherwise the test will depend on the race condition in the test itself.
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
SET max_threads = 1;
SELECT count() FROM memory WHERE NOT ignore(sleep(0.0001));" 2>&1 | grep -c -P '^1000$|^0$|Exception' &
sleep 0.05;
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
TRUNCATE TABLE memory;
DROP TABLE memory;
"

View File

@ -5,8 +5,8 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
yes 'CREATE TABLE IF NOT EXISTS table (x UInt8) ENGINE = MergeTree ORDER BY tuple();' | head -n 1000 | $CLICKHOUSE_CLIENT --multiquery &
yes 'DROP TABLE IF EXISTS table;' | head -n 1000 | $CLICKHOUSE_CLIENT --multiquery &
yes 'CREATE TABLE IF NOT EXISTS table (x UInt8) ENGINE = MergeTree ORDER BY tuple();' | head -n 1000 | $CLICKHOUSE_CLIENT &
yes 'DROP TABLE IF EXISTS table;' | head -n 1000 | $CLICKHOUSE_CLIENT &
wait
${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS table"

View File

@ -16,12 +16,12 @@ ${CLICKHOUSE_CLIENT} --query="CREATE TABLE buffer_00763_2 (s String) ENGINE = Bu
function thread1()
{
seq 1 500 | sed -r -e 's/.+/DROP TABLE IF EXISTS mt_00763_2; CREATE TABLE mt_00763_2 (s String) ENGINE = MergeTree ORDER BY s; INSERT INTO mt_00763_2 SELECT toString(number) FROM numbers(10);/' | ${CLICKHOUSE_CLIENT} --fsync-metadata 0 --multiquery --ignore-error ||:
seq 1 500 | sed -r -e 's/.+/DROP TABLE IF EXISTS mt_00763_2; CREATE TABLE mt_00763_2 (s String) ENGINE = MergeTree ORDER BY s; INSERT INTO mt_00763_2 SELECT toString(number) FROM numbers(10);/' | ${CLICKHOUSE_CLIENT} --fsync-metadata 0 --ignore-error ||:
}
function thread2()
{
seq 1 500 | sed -r -e 's/.+/SELECT count() FROM buffer_00763_2;/' | ${CLICKHOUSE_CLIENT} --multiquery --server_logs_file='/dev/null' --ignore-error 2>&1 | grep -vP '^0$|^10$|^Received exception|^Code: 60|^Code: 218|^Code: 473' | grep -v '(query: '
seq 1 500 | sed -r -e 's/.+/SELECT count() FROM buffer_00763_2;/' | ${CLICKHOUSE_CLIENT} --server_logs_file='/dev/null' --ignore-error 2>&1 | grep -vP '^0$|^10$|^Received exception|^Code: 60|^Code: 218|^Code: 473' | grep -v '(query: '
}
thread1 &

View File

@ -18,12 +18,12 @@ ${CLICKHOUSE_CLIENT} --query="INSERT INTO mt_00763_1 VALUES (1, '1'), (2, '2'),
function thread1()
{
seq 1 300 | sed -r -e 's/.+/ALTER TABLE mt_00763_1 MODIFY column s UInt32; ALTER TABLE mt_00763_1 MODIFY column s String;/' | ${CLICKHOUSE_CLIENT} --multiquery --ignore-error ||:
seq 1 300 | sed -r -e 's/.+/ALTER TABLE mt_00763_1 MODIFY column s UInt32; ALTER TABLE mt_00763_1 MODIFY column s String;/' | ${CLICKHOUSE_CLIENT} --ignore-error ||:
}
function thread2()
{
seq 1 2000 | sed -r -e 's/.+/SELECT sum(length(s)) FROM buffer_00763_1;/' | ${CLICKHOUSE_CLIENT} --multiquery --ignore-error 2>&1 | grep -vP '(^3$|^Received exception from server|^Code: 473)'
seq 1 2000 | sed -r -e 's/.+/SELECT sum(length(s)) FROM buffer_00763_1;/' | ${CLICKHOUSE_CLIENT} --ignore-error 2>&1 | grep -vP '(^3$|^Received exception from server|^Code: 473)'
}
thread1 &

View File

@ -9,7 +9,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS array_3dim_protobuf_00825;
CREATE TABLE array_3dim_protobuf_00825

View File

@ -11,7 +11,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS array_of_arrays_protobuf_00825;
CREATE TABLE array_of_arrays_protobuf_00825

View File

@ -11,7 +11,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS enum_mapping_protobuf_00825;
CREATE TABLE enum_mapping_protobuf_00825

View File

@ -9,7 +9,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS map_protobuf_00825;
CREATE TABLE map_protobuf_00825

View File

@ -11,7 +11,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS nested_in_nested_protobuf_00825;
CREATE TABLE nested_in_nested_protobuf_00825 (x Nested (y Nested (z Int64))) ENGINE = MergeTree ORDER BY tuple();

View File

@ -11,7 +11,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS nested_optional_protobuf_00825;
CREATE TABLE nested_optional_protobuf_00825

View File

@ -9,7 +9,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS no_length_delimiter_protobuf_00825;
DROP TABLE IF EXISTS roundtrip_no_length_delimiter_protobuf_00825;
@ -43,11 +43,11 @@ $CLICKHOUSE_CLIENT --query "SELECT * FROM roundtrip_no_length_delimiter_protobuf
rm "$BINARY_FILE_PATH"
# The ProtobufSingle format can't be used to write multiple rows because this format doesn't have any row delimiter.
$CLICKHOUSE_CLIENT --multiquery > /dev/null <<EOF
$CLICKHOUSE_CLIENT > /dev/null <<EOF
SELECT * FROM no_length_delimiter_protobuf_00825 FORMAT ProtobufSingle SETTINGS format_schema = '$SCHEMADIR/00825_protobuf_format_no_length_delimiter:Message'; -- { clientError 546 }
EOF
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE no_length_delimiter_protobuf_00825;
DROP TABLE roundtrip_no_length_delimiter_protobuf_00825;
EOF

View File

@ -17,7 +17,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS persons_00825;
DROP TABLE IF EXISTS roundtrip_persons_00825;
DROP TABLE IF EXISTS alt_persons_00825;
@ -129,7 +129,7 @@ $CLICKHOUSE_CLIENT --query "INSERT INTO edition2023_persons_00825 SETTINGS forma
$CLICKHOUSE_CLIENT --query "SELECT * FROM edition2023_persons_00825 ORDER BY name"
rm "$BINARY_FILE_PATH"
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE persons_00825;
DROP TABLE roundtrip_persons_00825;
DROP TABLE alt_persons_00825;

View File

@ -11,7 +11,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS table_skipped_column_in_nested_00825;
CREATE TABLE table_skipped_column_in_nested_00825 (

View File

@ -9,7 +9,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS splitted_nested_protobuf_00825;
CREATE TABLE splitted_nested_protobuf_00825 (

View File

@ -9,7 +9,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS squares_protobuf_00825;
CREATE TABLE squares_protobuf_00825 (number UInt32, square UInt64) ENGINE = MergeTree ORDER BY tuple();

View File

@ -9,7 +9,7 @@ SCHEMADIR=$CURDIR/format_schemas
set -eo pipefail
# Run the client.
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS table_default_protobuf_00825;
CREATE TABLE table_default_protobuf_00825

View File

@ -58,7 +58,7 @@ for NAME in $(find "$DATA_DIR"/*.parquet -print0 | xargs -0 -n 1 basename | LC_A
COLUMNS=$(cat "$COLUMNS_FILE") || continue
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS parquet_load"
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
CREATE TABLE parquet_load ($COLUMNS) ENGINE = Memory;
EOF

View File

@ -6,7 +6,7 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CUR_DIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --query="DROP TABLE IF EXISTS maps"
${CLICKHOUSE_CLIENT} --multiquery <<EOF
${CLICKHOUSE_CLIENT} <<EOF
CREATE TABLE maps (m1 Map(UInt32, UInt32), m2 Map(String, String), m3 Map(UInt32, Tuple(UInt32, UInt32)), m4 Map(UInt32, Array(UInt32)), m5 Array(Map(UInt32, UInt32)), m6 Tuple(Map(UInt32, UInt32), Map(String, String)), m7 Array(Map(UInt32, Array(Tuple(Map(UInt32, UInt32), Tuple(UInt32)))))) ENGINE=Memory();
EOF

View File

@ -23,7 +23,7 @@ echo -e "\n"
# Test that if both format_template_row_format setting and format_template_row are provided, error is thrown
row_format_file="$CURDIR"/"${CLICKHOUSE_TEST_UNIQUE_NAME}"_template_output_format_row.tmp
echo -ne 'Question: ${question:Quoted}, Answer: ${answer:Quoted}, Number of Likes: ${likes:Raw}, Date: ${date:Raw}' > $row_format_file
$CLICKHOUSE_CLIENT --multiline --multiquery --query "SELECT * FROM template GROUP BY question, answer, likes, date WITH TOTALS ORDER BY date LIMIT 3 FORMAT Template SETTINGS \
$CLICKHOUSE_CLIENT --multiline --query "SELECT * FROM template GROUP BY question, answer, likes, date WITH TOTALS ORDER BY date LIMIT 3 FORMAT Template SETTINGS \
format_template_row = '$row_format_file', \
format_template_row_format = 'Question: \${question:Quoted}, Answer: \${answer:Quoted}, Number of Likes: \${likes:Raw}, Date: \${date:Raw}', \
format_template_rows_between_delimiter = ';\n'; --{clientError 474}"
@ -38,7 +38,7 @@ format_template_rows_between_delimiter = ';\n'";
# Test that if both format_template_result_format setting and format_template_resultset are provided, error is thrown
resultset_output_file="$CURDIR"/"$CLICKHOUSE_TEST_UNIQUE_NAME"_template_output_format_resultset.tmp
echo -ne '===== Resultset ===== \n \${data} \n ===============' > $resultset_output_file
$CLICKHOUSE_CLIENT --multiline --multiquery --query "SELECT * FROM template GROUP BY question, answer, likes, date WITH TOTALS ORDER BY date LIMIT 3 FORMAT Template SETTINGS \
$CLICKHOUSE_CLIENT --multiline --query "SELECT * FROM template GROUP BY question, answer, likes, date WITH TOTALS ORDER BY date LIMIT 3 FORMAT Template SETTINGS \
format_template_resultset = '$resultset_output_file', \
format_template_resultset_format = '===== Resultset ===== \n \${data} \n ===============', \
format_template_row_format = 'Question: \${question:Quoted}, Answer: \${answer:Quoted}, Number of Likes: \${likes:Raw}, Date: \${date:Raw}', \

View File

@ -17,7 +17,7 @@ echo 1
# normal execution
$CLICKHOUSE_CLIENT \
--query="SELECT 'find_me_TOPSECRET=TOPSECRET' FROM numbers(1) FORMAT Null" \
--log_queries=1 --ignore-error --multiquery >"$tmp_file" 2>&1
--log_queries=1 --ignore-error >"$tmp_file" 2>&1
grep -F 'find_me_[hidden]' "$tmp_file" >/dev/null || echo 'fail 1a'
grep -F 'TOPSECRET' "$tmp_file" && echo 'fail 1b'
@ -38,7 +38,7 @@ echo 3
# failure at before query start
$CLICKHOUSE_CLIENT \
--query="SELECT 1 FROM system.numbers WHERE credit_card_number='find_me_TOPSECRET=TOPSECRET' FORMAT Null" \
--log_queries=1 --ignore-error --multiquery |& grep -v '^(query: ' > "$tmp_file"
--log_queries=1 --ignore-error |& grep -v '^(query: ' > "$tmp_file"
grep -F 'find_me_[hidden]' "$tmp_file" >/dev/null || echo 'fail 3a'
grep -F 'TOPSECRET' "$tmp_file" && echo 'fail 3b'
@ -56,7 +56,7 @@ echo 4
# failure at the end of query
$CLICKHOUSE_CLIENT \
--query="SELECT 'find_me_TOPSECRET=TOPSECRET', intDiv( 100, number - 10) FROM numbers(11) FORMAT Null" \
--log_queries=1 --ignore-error --max_block_size=2 --multiquery |& grep -v '^(query: ' > "$tmp_file"
--log_queries=1 --ignore-error --max_block_size=2 |& grep -v '^(query: ' > "$tmp_file"
grep -F 'find_me_[hidden]' "$tmp_file" >/dev/null || echo 'fail 4a'
grep -F 'TOPSECRET' "$tmp_file" && echo 'fail 4b'
@ -67,7 +67,7 @@ rm -f "$tmp_file2" >/dev/null 2>&1
bash -c "$CLICKHOUSE_CLIENT \
--function_sleep_max_microseconds_per_block 60000000 \
--query=\"select sleepEachRow(1) from numbers(10) where ignore('find_me_TOPSECRET=TOPSECRET')=0 and ignore('fwerkh_that_magic_string_make_me_unique') = 0 FORMAT Null\" \
--log_queries=1 --ignore-error --multiquery |& grep -v '^(query: ' > $tmp_file2" &
--log_queries=1 --ignore-error |& grep -v '^(query: ' > $tmp_file2" &
rm -f "$tmp_file" >/dev/null 2>&1
# check that executing query doesn't expose secrets in processlist
@ -133,7 +133,7 @@ insert into sensitive select number as id, toDate('2019-01-01') as date, 'abcd'
insert into sensitive select number as id, toDate('2019-01-01') as date, 'find_me_TOPSECRET=TOPSECRET' as value1, rand() as valuer from numbers(10);
insert into sensitive select number as id, toDate('2019-01-01') as date, 'abcd' as value1, rand() as valuer from numbers(10000);
select * from sensitive WHERE value1 = 'find_me_TOPSECRET=TOPSECRET' FORMAT Null;
drop table sensitive;" --log_queries=1 --ignore-error --multiquery >"$tmp_file" 2>&1
drop table sensitive;" --log_queries=1 --ignore-error >"$tmp_file" 2>&1
grep -F 'find_me_[hidden]' "$tmp_file" >/dev/null || echo 'fail 8a'
grep -F 'TOPSECRET' "$tmp_file" && echo 'fail 8b'
@ -144,7 +144,7 @@ echo 9
$CLICKHOUSE_CLIENT \
--server_logs_file=/dev/null \
--query="SELECT if( count() > 0, 'text_log non empty', 'text_log empty') FROM system.text_log WHERE event_date >= yesterday() and message like '%find_me%';
select * from system.text_log where event_date >= yesterday() and message like '%TOPSECRET=TOPSECRET%';" --ignore-error --multiquery
select * from system.text_log where event_date >= yesterday() and message like '%TOPSECRET=TOPSECRET%';" --ignore-error
echo 'finish'
rm -f "$tmp_file" >/dev/null 2>&1

View File

@ -7,7 +7,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS src;
DROP TABLE IF EXISTS mv;

View File

@ -6,7 +6,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
DROP TABLE IF EXISTS src_a;
DROP TABLE IF EXISTS src_b;

View File

@ -6,7 +6,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# that test is failing on versions <= 19.11.12
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
DROP TABLE IF EXISTS lc_empty_part_bug;
create table lc_empty_part_bug (id UInt64, s String) Engine=MergeTree ORDER BY id SETTINGS number_of_free_entries_in_pool_to_execute_mutation=0;
insert into lc_empty_part_bug select number as id, toString(rand()) from numbers(100);
@ -15,7 +15,7 @@ ${CLICKHOUSE_CLIENT} --multiquery --query="
echo 'Waited for mutation to finish'
${CLICKHOUSE_CLIENT} --multiquery --query="
${CLICKHOUSE_CLIENT} --query="
alter table lc_empty_part_bug modify column s LowCardinality(String);
SELECT 'still alive';
insert into lc_empty_part_bug select number+100 as id, toString(rand()) from numbers(100);

View File

@ -8,7 +8,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
set -e -o pipefail
$CLICKHOUSE_CLIENT --multiquery <<EOF
$CLICKHOUSE_CLIENT <<EOF
CREATE TABLE ${CLICKHOUSE_DATABASE}.table(x Int64, y Int64, insert_time DateTime) ENGINE = MergeTree ORDER BY tuple();
INSERT INTO ${CLICKHOUSE_DATABASE}.table VALUES (12, 102, now());

View File

@ -9,7 +9,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -9,7 +9,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -9,7 +9,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -9,7 +9,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS info;

View File

@ -10,7 +10,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;
@ -33,7 +33,7 @@ while true; do
$CLICKHOUSE_CLIENT "${opts[@]}" --query="SELECT count(*) FROM dst" | grep -q "7" && break || sleep .5 ||:
done
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SELECT * FROM dst ORDER BY market, w_end;
INSERT INTO mt VALUES (1, 8, '1990/01/01 12:00:35');
INSERT INTO mt VALUES (1, 8, '1990/01/01 12:00:37');

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS info;

View File

@ -9,7 +9,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;
@ -33,7 +33,7 @@ done
$CLICKHOUSE_CLIENT "${opts[@]}" --query="SELECT * FROM dst ORDER BY market, w_end;"
$CLICKHOUSE_CLIENT "${opts[@]}" --query="SELECT '----ALTER TABLE...MODIFY QUERY----';"
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
ALTER TABLE wv MODIFY QUERY SELECT count(a) AS count, mt.market * 2 as market, tumbleEnd(wid) AS w_end FROM mt GROUP BY tumble(timestamp, INTERVAL '5' SECOND, 'US/Samoa') AS wid, mt.market;
INSERT INTO mt VALUES (1, 6, '1990/01/01 12:00:10');

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS mt2;
@ -35,7 +35,7 @@ done
$CLICKHOUSE_CLIENT "${opts[@]}" --query="SELECT * FROM dst ORDER BY market, w_end;"
$CLICKHOUSE_CLIENT "${opts[@]}" --query="SELECT '----ALTER TABLE...MODIFY QUERY----';"
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
ALTER TABLE wv MODIFY QUERY SELECT count(a) AS count, mt2.market * 2 as market, tumbleEnd(wid) AS w_end FROM mt2 GROUP BY tumble(timestamp, INTERVAL '5' SECOND, 'US/Samoa') AS wid, mt2.market;
INSERT INTO mt2 VALUES (1, 6, '1990/01/01 12:00:10');

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS wv;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS dst;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS wv;

View File

@ -12,7 +12,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --allow_deprecated_database_ordinary=1 --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" --allow_deprecated_database_ordinary=1 <<EOF
SET allow_experimental_window_view = 1;
SET window_view_clean_interval = 1;

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS wv;
@ -31,7 +31,7 @@ done
$CLICKHOUSE_CLIENT "${opts[@]}" --query="SELECT * FROM wv ORDER BY market, w_end;"
$CLICKHOUSE_CLIENT "${opts[@]}" --query="SELECT '----ALTER TABLE...MODIFY QUERY----';"
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
ALTER TABLE wv MODIFY QUERY SELECT count(a) AS count, mt.market * 2 as market, tumbleEnd(wid) AS w_end FROM mt GROUP BY tumble(timestamp, INTERVAL '5' SECOND, 'US/Samoa') AS wid, mt.market;
INSERT INTO mt VALUES (1, 6, toDateTime('1990/01/01 12:00:10', 'US/Samoa'));

View File

@ -8,7 +8,7 @@ opts=(
"--allow_experimental_analyzer=0"
)
$CLICKHOUSE_CLIENT "${opts[@]}" --multiquery <<EOF
$CLICKHOUSE_CLIENT "${opts[@]}" <<EOF
SET allow_experimental_window_view = 1;
DROP TABLE IF EXISTS mt;
DROP TABLE IF EXISTS wv;

View File

@ -14,7 +14,7 @@ $CLICKHOUSE_CLIENT --query "CREATE TABLE mt (n Int64) ENGINE=MergeTree ORDER BY
function begin_commit_readonly()
{
while true; do
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
SET wait_changes_become_visible_after_commit_mode='wait';
BEGIN TRANSACTION;
COMMIT;" 2>&1| grep -Fa "Exception: " | grep -Fv UNKNOWN_STATUS_OF_TRANSACTION
@ -24,7 +24,7 @@ function begin_commit_readonly()
function begin_rollback_readonly()
{
while true; do
$CLICKHOUSE_CLIENT --wait_changes_become_visible_after_commit_mode=wait_unknown --multiquery --query "
$CLICKHOUSE_CLIENT --wait_changes_become_visible_after_commit_mode=wait_unknown --query "
BEGIN TRANSACTION;
SET TRANSACTION SNAPSHOT 42;
ROLLBACK;"
@ -34,7 +34,7 @@ function begin_rollback_readonly()
function begin_insert_commit()
{
while true; do
$CLICKHOUSE_CLIENT --wait_changes_become_visible_after_commit_mode=async --multiquery --query "
$CLICKHOUSE_CLIENT --wait_changes_become_visible_after_commit_mode=async --query "
BEGIN TRANSACTION;
INSERT INTO mt VALUES ($RANDOM);
COMMIT;" 2>&1| grep -Fa "Exception: " | grep -Fv UNKNOWN_STATUS_OF_TRANSACTION

View File

@ -22,7 +22,7 @@ function thread_insert()
set -eu
val=1
while true; do
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
INSERT INTO src VALUES /* ($val, 1) */ ($val, 1);
INSERT INTO src VALUES /* ($val, 2) */ ($val, 2);
@ -210,7 +210,7 @@ function thread_select()
set -eu
while true; do
output=$(
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
-- no duplicates
SELECT type, throwIf(count(n) != countDistinct(n)) FROM src GROUP BY type FORMAT Null;

View File

@ -19,7 +19,7 @@ function thread_insert()
set -e
val=1
while true; do
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
INSERT INTO src VALUES /* ($val, 1) */ ($val, 1);
INSERT INTO src VALUES /* ($val, 2) */ ($val, 2);
@ -40,7 +40,7 @@ function thread_partition_src_to_dst()
sum=0
for i in {1..20}; do
out=$(
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
INSERT INTO src VALUES /* ($i, 3) */ ($i, 3);
INSERT INTO dst SELECT * FROM src;
@ -49,7 +49,7 @@ function thread_partition_src_to_dst()
SELECT throwIf((SELECT (count(), sum(n)) FROM merge(currentDatabase(), '') WHERE type=3) != ($count + 1, $sum + $i)) FORMAT Null;
COMMIT;" 2>&1) ||:
echo "$out" | grep -Fv "SERIALIZATION_ERROR" | grep -F "Received from " && $CLICKHOUSE_CLIENT --multiquery --query "
echo "$out" | grep -Fv "SERIALIZATION_ERROR" | grep -F "Received from " && $CLICKHOUSE_CLIENT --query "
begin transaction;
set transaction snapshot 3;
select $i, 'src', type, n, _part from src order by type, n;
@ -68,7 +68,7 @@ function thread_partition_dst_to_src()
if (( i % 2 )); then
action="COMMIT"
fi
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
SYSTEM STOP MERGES dst;
ALTER TABLE dst DROP PARTITION ID 'nonexistent'; -- STOP MERGES doesn't wait for started merges to finish, so we use this trick
SYSTEM SYNC TRANSACTION LOG;
@ -87,7 +87,7 @@ function thread_select()
{
set -e
while true; do
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
-- no duplicates
SELECT type, throwIf(count(n) != countDistinct(n)) FROM src GROUP BY type FORMAT Null;

View File

@ -65,7 +65,7 @@ function insert_commit_action()
local tag=$1; shift
# some transactions will fail due to constraint
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
INSERT INTO src VALUES /* ($i, $tag) */ ($i, $tag);
SELECT throwIf((SELECT sum(nm) FROM mv) != $(($i * $tag))) /* ($i, $tag) */ FORMAT Null;
@ -83,7 +83,7 @@ function insert_rollback_action()
local i=$1; shift
local tag=$1; shift
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
INSERT INTO src VALUES /* (42, $tag) */ (42, $tag);
SELECT throwIf((SELECT count() FROM src WHERE n=42 AND m=$tag) != 1) FORMAT Null;
@ -112,7 +112,7 @@ function optimize_action()
action="ROLLBACK"
fi
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
$optimize_query;
$action;
@ -126,7 +126,7 @@ function select_action()
{
set -e
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
SELECT throwIf((SELECT (sum(n), count() % 2) FROM src) != (0, 1)) FORMAT Null;
SELECT throwIf((SELECT (sum(nm), count() % 2) FROM mv) != (0, 1)) FORMAT Null;
@ -140,7 +140,7 @@ function select_insert_action()
{
set -e
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
SELECT throwIf((SELECT count() FROM tmp) != 0) FORMAT Null;
INSERT INTO tmp SELECT 1, n*m FROM src;
@ -199,7 +199,7 @@ wait $PID_8 || echo "second select_insert_action has failed with status $?" 2>&1
wait_for_queries_to_finish $WAIT_FINISH
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
SELECT throwIf((SELECT (sum(n), count() % 2) FROM src) != (0, 1)) FORMAT Null;
SELECT throwIf((SELECT (sum(nm), count() % 2) FROM mv) != (0, 1)) FORMAT Null;
@ -209,7 +209,7 @@ $CLICKHOUSE_CLIENT --multiquery --query "
COMMIT;
"
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
SELECT throwIf((SELECT (sum(n), count() % 2) FROM src) != (0, 1)) FORMAT Null;
SELECT throwIf((SELECT (sum(nm), count() % 2) FROM mv) != (0, 1)) FORMAT Null;
SELECT throwIf((SELECT (sum(nm), count() % 2) FROM dst) != (0, 1)) FORMAT Null;

View File

@ -16,7 +16,7 @@ $CLICKHOUSE_CLIENT --query "CREATE TABLE mt (n Int8, m Int8) ENGINE=MergeTree OR
function thread_insert_commit()
{
for i in {1..50}; do
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
INSERT INTO mt VALUES /* ($i, $1) */ ($i, $1);
INSERT INTO mt VALUES /* (-$i, $1) */ (-$i, $1);
@ -27,7 +27,7 @@ function thread_insert_commit()
function thread_insert_rollback()
{
for _ in {1..50}; do
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
INSERT INTO mt VALUES /* (42, $1) */ (42, $1);
ROLLBACK;";
@ -38,7 +38,7 @@ function thread_select()
{
while true; do
# The first and the last queries must get the same result
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
SET throw_on_unsupported_query_inside_transaction=0;
CREATE TEMPORARY TABLE tmp AS SELECT arraySort(groupArray(n)), arraySort(groupArray(m)), arraySort(groupArray(_part)) FROM mt FORMAT Null;
@ -58,7 +58,7 @@ kill -TERM $PID_4
wait
wait_for_queries_to_finish 40
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
BEGIN TRANSACTION;
SELECT count(), sum(n), sum(m=1), sum(m=2), sum(m=3) FROM mt;";

View File

@ -5,4 +5,4 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --quota_key Hello --query_id test_quota_key --log_queries 1 --multiquery --query "SELECT 1; SYSTEM FLUSH LOGS; SELECT DISTINCT quota_key FROM system.query_log WHERE current_database = currentDatabase() AND event_date >= yesterday() AND event_time >= now() - 300 AND query_id = 'test_quota_key'"
$CLICKHOUSE_CLIENT --quota_key Hello --query_id test_quota_key --log_queries 1 --query "SELECT 1; SYSTEM FLUSH LOGS; SELECT DISTINCT quota_key FROM system.query_log WHERE current_database = currentDatabase() AND event_date >= yesterday() AND event_time >= now() - 300 AND query_id = 'test_quota_key'"

View File

@ -4,7 +4,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
DROP TABLE IF EXISTS NmSubj;
DROP TABLE IF EXISTS events;
@ -60,7 +60,7 @@ FROM events as e INNER JOIN NmSubj as ns
ON ns.NmId = toUInt32(e.Param1)
WHERE e.EventDate = today() - 7 AND e.EventId = 'GCO' AND ns.SubjectId = 2073"
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
DROP TABLE NmSubj;
DROP TABLE events;
"

View File

@ -11,7 +11,7 @@ TIMELIMIT=31
while [ $SECONDS -lt "$TIMELIMIT" ] && [ $it -lt 100 ];
do
it=$((it+1))
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
DROP TABLE IF EXISTS mt;
CREATE TABLE mt (x UInt8, k UInt8 DEFAULT 0) ENGINE = SummingMergeTree ORDER BY k;

View File

@ -5,7 +5,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
DROP TABLE IF EXISTS bug;
CREATE TABLE bug (UserID UInt64, Date Date) ENGINE = MergeTree ORDER BY Date
SETTINGS index_granularity = 8192, index_granularity_bytes = '10Mi', merge_max_block_size = 8192;
@ -18,5 +18,5 @@ cat "$LOG" | grep Loaded
rm "$LOG"
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
DROP TABLE bug;"

View File

@ -9,7 +9,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
echo 'zero rows'
for format in TSVWithNames TSVWithNamesAndTypes CSVWithNames CSVWithNamesAndTypes JSONCompactEachRowWithNames JSONCompactEachRowWithNamesAndTypes JSONCompactStringsEachRow JSONCompactStringsEachRowWithNamesAndTypes; do
echo $format
${CLICKHOUSE_LOCAL} --multiquery --query="
${CLICKHOUSE_LOCAL} --query="
CREATE TABLE ${format}_01375 ENGINE File($format, '01375_$format') AS SELECT * FROM numbers(1) WHERE number < 0;
SELECT * FROM ${format}_01375;
DROP TABLE ${format}_01375;
@ -22,7 +22,7 @@ echo 'multi clickhouse-local one file'
for format in TSVWithNames TSVWithNamesAndTypes CSVWithNames CSVWithNamesAndTypes JSONCompactEachRowWithNames JSONCompactEachRowWithNamesAndTypes JSONCompactStringsEachRow JSONCompactStringsEachRowWithNamesAndTypes; do
echo $format
for _ in {1..2}; do
${CLICKHOUSE_LOCAL} --multiquery --query="
${CLICKHOUSE_LOCAL} --query="
CREATE TABLE ${format}_01375 ENGINE File($format, '01375_$format') AS SELECT * FROM numbers(1);
SELECT * FROM ${format}_01375;
DROP TABLE ${format}_01375;

View File

@ -34,7 +34,7 @@ do
SELECT count() FROM t HAVING count() > 0;
SELECT ${i};
"
done | ${CLICKHOUSE_CLIENT} --multiquery
done | ${CLICKHOUSE_CLIENT}
wait

View File

@ -10,6 +10,6 @@ rm -rf "${WORKING_FOLDER_01527}"
mkdir -p "${WORKING_FOLDER_01527}"
# OPTIMIZE was crashing due to lack of temporary volume in local
${CLICKHOUSE_LOCAL} --multiquery --query "drop database if exists d; create database d; create table d.t engine MergeTree order by a as select 1 a; optimize table d.t final" --path="${WORKING_FOLDER_01527}"
${CLICKHOUSE_LOCAL} --query "drop database if exists d; create database d; create table d.t engine MergeTree order by a as select 1 a; optimize table d.t final" --path="${WORKING_FOLDER_01527}"
rm -rf "${WORKING_FOLDER_01527}"

View File

@ -5,7 +5,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --multiquery --query "
$CLICKHOUSE_CLIENT --query "
SET allow_suspicious_low_cardinality_types=1;
CREATE TABLE IF NOT EXISTS test_01543 (value LowCardinality(String), value2 LowCardinality(UInt64)) ENGINE=Memory();
"

View File

@ -10,7 +10,7 @@ rm -f -- "$the_file"
# We are going to check that format settings work for File engine,
# by creating a table with a non-default delimiter, and reading from it.
${CLICKHOUSE_LOCAL} --multiquery --query "
${CLICKHOUSE_LOCAL} --query "
create table t(a int, b int) engine File(CSV, '$the_file') settings format_csv_delimiter = '|';
insert into t select 1 a, 1 b;
"
@ -18,7 +18,7 @@ ${CLICKHOUSE_LOCAL} --multiquery --query "
# See what's in the file
cat "$the_file"
${CLICKHOUSE_LOCAL} --multiquery --query "
${CLICKHOUSE_LOCAL} --query "
create table t(a int, b int) engine File(CSV, '$the_file') settings format_csv_delimiter = '|';
select * from t;
"

View File

@ -18,7 +18,7 @@ mkdir -p "${WORKING_FOLDER_01600}"
clickhouse_local() {
local query="$1"
shift
${CLICKHOUSE_LOCAL} --allow_deprecated_database_ordinary=1 --multiquery --query "$query" "$@" --path="${WORKING_FOLDER_01600}"
${CLICKHOUSE_LOCAL} --allow_deprecated_database_ordinary=1 --query "$query" "$@" --path="${WORKING_FOLDER_01600}"
}
test_detach_attach_sequence() {

View File

@ -24,7 +24,7 @@ verify()
if [[ $i -eq 5000 ]]
then
$CLICKHOUSE_CLIENT --multiquery "
$CLICKHOUSE_CLIENT "
SELECT sumIf(value, metric = 'PartsActive'), sumIf(value, metric = 'PartsOutdated') FROM system.metrics;
SELECT sum(active), sum(NOT active) FROM system.parts;
SELECT sum(active), sum(NOT active) FROM system.projection_parts;

View File

@ -19,7 +19,7 @@ done
${CLICKHOUSE_GIT_IMPORT} 2>&1 | wc -l
${CLICKHOUSE_CLIENT} --multiline --multiquery --query "
${CLICKHOUSE_CLIENT} --multiline --query "
DROP TABLE IF EXISTS commits;
DROP TABLE IF EXISTS file_changes;
@ -122,7 +122,7 @@ ${CLICKHOUSE_CLIENT} --query "SELECT count() FROM commits"
${CLICKHOUSE_CLIENT} --query "SELECT count() FROM file_changes"
${CLICKHOUSE_CLIENT} --query "SELECT count(), round(avg(indent), 1) FROM line_changes"
${CLICKHOUSE_CLIENT} --multiline --multiquery --query "
${CLICKHOUSE_CLIENT} --multiline --query "
DROP TABLE commits;
DROP TABLE file_changes;
DROP TABLE line_changes;

View File

@ -4,7 +4,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --query "
${CLICKHOUSE_CLIENT} --query "
DROP TABLE IF EXISTS test;
CREATE TABLE test (a Array(String)) ENGINE = Memory;
"
@ -22,7 +22,7 @@ ${CLICKHOUSE_CLIENT} --input_format_csv_arrays_as_nested_csv 1 --query "INSERT I
"""Hello"", ""world"", ""42"""" TV"""
END
${CLICKHOUSE_CLIENT} --multiquery --query "
${CLICKHOUSE_CLIENT} --query "
SELECT * FROM test;
DROP TABLE IF EXISTS test;
"

View File

@ -8,7 +8,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --multiquery --query "DROP TABLE IF EXISTS test; CREATE TABLE IF NOT EXISTS test (x UInt64, s Array(Nullable(String))) ENGINE = TinyLog;"
$CLICKHOUSE_CLIENT --query "DROP TABLE IF EXISTS test; CREATE TABLE IF NOT EXISTS test (x UInt64, s Array(Nullable(String))) ENGINE = TinyLog;"
function thread_select {
local TIMELIMIT=$((SECONDS+$1))
@ -47,4 +47,4 @@ thread_insert $TIMEOUT &
wait
echo "Done"
$CLICKHOUSE_CLIENT --multiquery --query "DROP TABLE IF EXISTS test;"
$CLICKHOUSE_CLIENT --query "DROP TABLE IF EXISTS test;"

View File

@ -24,7 +24,7 @@ ${CLICKHOUSE_CLIENT} --query "select file('a.txt'), file('b.txt');";echo ":"$?
${CLICKHOUSE_CLIENT} --query "insert into data select file('a.txt'), file('b.txt');";echo ":"$?
${CLICKHOUSE_CLIENT} --query "insert into data select file('a.txt'), file('b.txt');";echo ":"$?
${CLICKHOUSE_CLIENT} --query "select file('c.txt'), * from data";echo ":"$?
${CLICKHOUSE_CLIENT} --multiquery --query "
${CLICKHOUSE_CLIENT} --query "
create table filenames(name String) engine=MergeTree() order by tuple();
insert into filenames values ('a.txt'), ('b.txt'), ('c.txt');
select file(name) from filenames format TSV;
@ -56,7 +56,7 @@ echo $c_count
# Valid cases:
# The default dir is the CWD path in LOCAL mode
${CLICKHOUSE_LOCAL} --multiquery --query "
${CLICKHOUSE_LOCAL} --query "
drop table if exists data;
create table data (A String, B String) engine=MergeTree() order by A;
select file('a.txt'), file('b.txt');

View File

@ -8,7 +8,7 @@ function wait_for_query_to_start() {
while [[ $($CLICKHOUSE_CURL -sS "$CLICKHOUSE_URL" -d "SELECT sum(read_rows) FROM system.processes WHERE query_id = '$1'") == 0 ]]; do sleep 0.1; done
}
${CLICKHOUSE_CLIENT} --multiline --multiquery --query "
${CLICKHOUSE_CLIENT} --multiline --query "
drop table if exists simple;
create table simple (i int, j int) engine = MergeTree order by i

View File

@ -82,7 +82,7 @@ trap "cleanup" EXIT
function executeQueryExpectError()
{
cat - > "${TMP_QUERY_FILE}"
! ${CLICKHOUSE_CLIENT} --multiquery --queries-file "${TMP_QUERY_FILE}" "${@}" 2>&1 | tee -a "${TMP_QUERY_FILE}"
! ${CLICKHOUSE_CLIENT} --queries-file "${TMP_QUERY_FILE}" "${@}" 2>&1 | tee -a "${TMP_QUERY_FILE}"
}
function createUser()
@ -303,7 +303,7 @@ function runEndpointTests()
if [[ -n "${setup_queries}" ]]
then
# echo "Executing setup queries: ${setup_queries}"
echo "${setup_queries}" | executeQuery --multiquery
echo "${setup_queries}" | executeQuery
fi
testTCP "${auth_type}" "${username}" "${password}"
@ -357,7 +357,7 @@ testAsUserIdentifiedBy "plaintext_password"
testAsUserIdentifiedBy "sha256_password"
testAsUserIdentifiedBy "double_sha1_password"
executeQuery --multiquery <<EOF
executeQuery <<EOF
SYSTEM FLUSH LOGS;
WITH

View File

@ -5,7 +5,7 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CUR_DIR"/../shell_config.sh
$CLICKHOUSE_CLIENT --query="DROP TABLE IF EXISTS nullable_low_cardinality_tsv_test;";
$CLICKHOUSE_CLIENT --multiquery --query="CREATE TABLE nullable_low_cardinality_tsv_test
$CLICKHOUSE_CLIENT --query="CREATE TABLE nullable_low_cardinality_tsv_test
(
A Date,
S LowCardinality(Nullable(String)),

View File

@ -4,7 +4,7 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CUR_DIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --query "
${CLICKHOUSE_CLIENT} --query "
drop table if exists aliases_lazyness;
create table aliases_lazyness (x UInt32, y ALIAS sleepEachRow(0.1)) Engine=MergeTree ORDER BY x;
insert into aliases_lazyness(x) select * from numbers(100);

View File

@ -6,13 +6,13 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --query "DROP TABLE IF EXISTS t; CREATE TABLE t (x UInt64) ENGINE = Memory;"
${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS t; CREATE TABLE t (x UInt64) ENGINE = Memory;"
# Rate limit is chosen for operation to spent more than one second.
seq 1 1000 | pv --quiet --rate-limit 400 | ${CLICKHOUSE_CLIENT} --query "INSERT INTO t FORMAT TSV"
# We check that the value of NetworkReceiveElapsedMicroseconds correctly includes the time spent waiting data from the client.
${CLICKHOUSE_CLIENT} --multiquery --query "SYSTEM FLUSH LOGS;
${CLICKHOUSE_CLIENT} --query "SYSTEM FLUSH LOGS;
WITH ProfileEvents['NetworkReceiveElapsedMicroseconds'] AS time
SELECT time >= 1000000 ? 1 : time FROM system.query_log
WHERE current_database = currentDatabase() AND query_kind = 'Insert' AND event_date >= yesterday() AND type = 2 ORDER BY event_time DESC LIMIT 1;"

View File

@ -4,11 +4,11 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --query "DROP TABLE IF EXISTS t; CREATE TABLE t (x UInt64) ENGINE = Memory;"
${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS t; CREATE TABLE t (x UInt64) ENGINE = Memory;"
seq 1 1000 | ${CLICKHOUSE_CLIENT} --query "INSERT INTO t FORMAT TSV"
${CLICKHOUSE_CLIENT} --multiquery --query "SYSTEM FLUSH LOGS;
${CLICKHOUSE_CLIENT} --query "SYSTEM FLUSH LOGS;
WITH ProfileEvents['NetworkReceiveBytes'] AS bytes
SELECT bytes >= 8000 AND bytes < 9000 ? 1 : bytes FROM system.query_log
WHERE current_database = currentDatabase() AND query_kind = 'Insert' AND event_date >= yesterday() AND type = 2 ORDER BY event_time DESC LIMIT 1;"

View File

@ -7,7 +7,7 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CUR_DIR"/../shell_config.sh
${CLICKHOUSE_CLIENT} --multiquery --query "
${CLICKHOUSE_CLIENT} --query "
DROP USER IF EXISTS dns_fail_1, dns_fail_2;
CREATE USER dns_fail_1 HOST NAME 'non.existing.host.name', '${MYHOSTNAME}';
CREATE USER dns_fail_2 HOST NAME '${MYHOSTNAME}', 'non.existing.host.name';"

View File

@ -9,7 +9,7 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
mkdir -p ${USER_FILES_PATH}/
cp $CUR_DIR/data_zstd/test_01946.zstd ${USER_FILES_PATH}/
${CLICKHOUSE_CLIENT} --multiline --multiquery --query "
${CLICKHOUSE_CLIENT} --multiline --query "
set min_chunk_bytes_for_parallel_parsing=10485760;
set max_read_buffer_size = 65536;
set input_format_parallel_parsing = 0;

View File

@ -19,7 +19,7 @@ ${CLICKHOUSE_CLIENT} --query "INSERT INTO test_infile FROM INFILE '${CLICKHOUSE_
${CLICKHOUSE_CLIENT} --query "SELECT * FROM test_infile;"
# if it does not fail, the select will print the inserted data
${CLICKHOUSE_LOCAL} --multiquery --query "CREATE TABLE test_infile (word String) ENGINE=Memory(); INSERT INTO test_infile FROM INFILE '${CLICKHOUSE_TMP}/test_infile.gz' FORMAT CSV; SELECT * from test_infile;"
${CLICKHOUSE_LOCAL} --query "CREATE TABLE test_infile (word String) ENGINE=Memory(); INSERT INTO test_infile FROM INFILE '${CLICKHOUSE_TMP}/test_infile.gz' FORMAT CSV; SELECT * from test_infile;"
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&query=DROP+TABLE" -d 'IF EXISTS test_infile_url'
${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&query=CREATE" -d 'TABLE test_infile_url (x String) ENGINE = Memory'

View File

@ -55,8 +55,8 @@ ${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS test_compression_keyword;"
[ -e "${CLICKHOUSE_TMP}"/test_comp_for_input_and_output_without_gz.gz ] && rm "${CLICKHOUSE_TMP}"/test_comp_for_input_and_output_without_gz.gz
# create files with and without the explicit compression keyword to check that both queries work correctly
${CLICKHOUSE_LOCAL} --multiquery --query "SELECT * FROM (SELECT 'Hello, World! From local.') INTO OUTFILE '${CLICKHOUSE_TMP}/test_comp_for_input_and_output.gz' FORMAT TabSeparated;"
${CLICKHOUSE_LOCAL} --multiquery --query "SELECT * FROM (SELECT 'Hello, World! From local.') INTO OUTFILE '${CLICKHOUSE_TMP}/test_comp_for_input_and_output_without_gz' COMPRESSION 'GZ' FORMAT TabSeparated;"
${CLICKHOUSE_LOCAL} --query "SELECT * FROM (SELECT 'Hello, World! From local.') INTO OUTFILE '${CLICKHOUSE_TMP}/test_comp_for_input_and_output.gz' FORMAT TabSeparated;"
${CLICKHOUSE_LOCAL} --query "SELECT * FROM (SELECT 'Hello, World! From local.') INTO OUTFILE '${CLICKHOUSE_TMP}/test_comp_for_input_and_output_without_gz' COMPRESSION 'GZ' FORMAT TabSeparated;"
# check content of files
cp ${CLICKHOUSE_TMP}/test_comp_for_input_and_output.gz ${CLICKHOUSE_TMP}/test_comp_for_input_and_output_to_decomp.gz
@ -68,7 +68,7 @@ gunzip ${CLICKHOUSE_TMP}/test_comp_for_input_and_output_without_gz_to_decomp.gz
cat ${CLICKHOUSE_TMP}/test_comp_for_input_and_output_without_gz_to_decomp
# create table to check inserts
${CLICKHOUSE_LOCAL} --multiquery --query "
${CLICKHOUSE_LOCAL} --query "
DROP TABLE IF EXISTS test_compression_keyword;
CREATE TABLE test_compression_keyword (text String) Engine=Memory;
INSERT INTO TABLE test_compression_keyword FROM INFILE '${CLICKHOUSE_TMP}/test_comp_for_input_and_output.gz' FORMAT TabSeparated;
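For context, the round trip this file checks, writing a gzip file via INTO OUTFILE and reading it back via FROM INFILE, can be sketched outside the harness roughly as follows; the path and table name are invented, clickhouse-local is assumed to be on PATH, and gzip is inferred from the .gz extension:

rm -f /tmp/sketch.gz   # INTO OUTFILE refuses to overwrite an existing file
clickhouse-local --query "select 'Hello' into outfile '/tmp/sketch.gz' format TabSeparated"
clickhouse-local --query "
    create table sketch_t (s String) engine = Memory;
    insert into sketch_t from infile '/tmp/sketch.gz' format TabSeparated;
    select s from sketch_t;"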

View File

@ -17,7 +17,7 @@ echo -e "103" > "${CLICKHOUSE_TMP}"/test_infile_parallel_3
gzip "${CLICKHOUSE_TMP}"/test_infile_parallel
${CLICKHOUSE_CLIENT} --multiquery <<EOF
${CLICKHOUSE_CLIENT} <<EOF
DROP TABLE IF EXISTS test_infile_parallel;
CREATE TABLE test_infile_parallel (Id Int32,Value Enum('first' = 1, 'second' = 2)) ENGINE=Memory();
SET input_format_allow_errors_num=1;
@ -27,13 +27,13 @@ SELECT count() FROM test_infile_parallel WHERE Value='second';
EOF
# Error code is 27 (DB::ParsingException). It is not ignored.
${CLICKHOUSE_CLIENT} -m --multiquery --query "DROP TABLE IF EXISTS test_infile_parallel;
${CLICKHOUSE_CLIENT} -m --query "DROP TABLE IF EXISTS test_infile_parallel;
CREATE TABLE test_infile_parallel (Id Int32,Value Enum('first' = 1, 'second' = 2)) ENGINE=Memory();
SET input_format_allow_errors_num=0;
INSERT INTO test_infile_parallel FROM INFILE '${CLICKHOUSE_TMP}/test_infile_parallel*' FORMAT TSV;
" 2>&1 | grep -q "27" && echo "Correct" || echo 'Fail'
${CLICKHOUSE_LOCAL} --multiquery <<EOF
${CLICKHOUSE_LOCAL} <<EOF
DROP TABLE IF EXISTS test_infile_parallel;
SET input_format_allow_errors_num=1;
CREATE TABLE test_infile_parallel (Id Int32,Value Enum('first' = 1, 'second' = 2)) ENGINE=Memory();
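Statements arriving on stdin are split the same way without any extra flag. A minimal stand-alone sketch of the heredoc form used above, with an invented table and clickhouse-local so that no server is needed:

clickhouse-local <<'EOF'
drop table if exists sketch_t;
create table sketch_t (x Int32) engine = Memory;
insert into sketch_t values (1), (2);
select count() from sketch_t;
EOF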

View File

@ -4,4 +4,4 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CUR_DIR"/../shell_config.sh
${CLICKHOUSE_LOCAL} --multiquery --query "create table t (n int, m int default 42) engine=Memory;insert into t values (1, NULL);select * from t"
${CLICKHOUSE_LOCAL} --query "create table t (n int, m int default 42) engine=Memory;insert into t values (1, NULL);select * from t"

View File

@ -14,24 +14,24 @@ $CLICKHOUSE_CLIENT --query="SYSTEM STOP MERGES block_dedup_token;"
$CLICKHOUSE_CLIENT --query="SELECT 'insert 2 blocks with dedup token, 1 row per block'"
DEDUP_TOKEN='dedup1'
echo 'INSERT INTO block_dedup_token VALUES (1), (2)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'insert deduplicated by token'"
echo 'INSERT INTO block_dedup_token VALUES (1), (2)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'insert the same data by providing different dedup token'"
DEDUP_TOKEN='dedup2'
echo 'INSERT INTO block_dedup_token VALUES (1), (2)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'insert 4 blocks, 2 deduplicated, 2 inserted'"
echo 'INSERT INTO block_dedup_token VALUES (1), (2), (3), (4)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'disable token based deduplication, insert the same data as with token'"
DEDUP_TOKEN=''
echo 'INSERT INTO block_dedup_token VALUES (1), (2), (3), (4)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="DROP TABLE block_dedup_token SYNC"

View File

@ -15,24 +15,24 @@ $CLICKHOUSE_CLIENT --query="SYSTEM STOP MERGES block_dedup_token_replica"
$CLICKHOUSE_CLIENT --query="SELECT 'insert 2 blocks with dedup token, 1 row per block'"
DEDUP_TOKEN='dedup1'
echo 'INSERT INTO block_dedup_token_replica VALUES (1), (2)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'insert deduplicated by token'"
echo 'INSERT INTO block_dedup_token_replica VALUES (1), (2)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'insert the same data by providing different dedup token'"
DEDUP_TOKEN='dedup2'
echo 'INSERT INTO block_dedup_token_replica VALUES (1), (2)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'insert 4 blocks, 2 deduplicated, 2 inserted'"
echo 'INSERT INTO block_dedup_token_replica VALUES (1), (2), (3), (4)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="SELECT 'disable token based deduplication, insert the same data as with token'"
DEDUP_TOKEN=''
echo 'INSERT INTO block_dedup_token_replica VALUES (1), (2), (3), (4)' | ${CLICKHOUSE_CURL} -sS "${CLICKHOUSE_URL}&$INSERT_BLOCK_SETTINGS&insert_deduplication_token='$DEDUP_TOKEN'&query=" --data-binary @-
$CLICKHOUSE_CLIENT --multiquery --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query "$QUERY_COUNT_ORIGIN_BLOCKS;$QUERY_SELECT_FROM_TABLE_ORDERED"
$CLICKHOUSE_CLIENT --query="DROP TABLE block_dedup_token_replica SYNC"

View File

@ -7,7 +7,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# "max_parts_to_merge_at_once = 1" prevents merges to start in background before our own OPTIMIZE FINAL
$CLICKHOUSE_CLIENT --multiquery -q "
$CLICKHOUSE_CLIENT -q "
drop table if exists many_mutations;
create table many_mutations (x UInt32, y UInt32) engine = MergeTree order by x settings number_of_mutations_to_delay = 0, number_of_mutations_to_throw = 0, max_parts_to_merge_at_once = 1;
insert into many_mutations values (0, 0), (1, 1);
@ -17,7 +17,7 @@ select x, y from many_mutations order by x;
job()
{
yes "alter table many_mutations update y = y + 1 where 1;" | head -n 1000 | $CLICKHOUSE_CLIENT --multiquery
yes "alter table many_mutations update y = y + 1 where 1;" | head -n 1000 | $CLICKHOUSE_CLIENT
}
job &
@ -44,7 +44,7 @@ job &
wait
# truncate before drop to avoid removing all the mutations (which is slow) in DatabaseCatalog's thread (may affect other tests)
$CLICKHOUSE_CLIENT --multiquery -q "
$CLICKHOUSE_CLIENT -q "
select count() from system.mutations where database = currentDatabase() and table = 'many_mutations' and not is_done;
system start merges many_mutations;
optimize table many_mutations final SETTINGS optimize_throw_if_noop = 1;
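The job() helper above is just the generic "pipe a stream of statements into the client" pattern, which no longer needs the old flag. An illustrative, much smaller variant, run through clickhouse-local to stay self-contained (the statement and the count are invented; the Null format discards the result rows):

yes "select 1 format Null;" | head -n 100 | clickhouse-local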

View File

@ -7,7 +7,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# "max_parts_to_merge_at_once = 1" prevents merges to start in background before our own OPTIMIZE FINAL
$CLICKHOUSE_CLIENT --multiquery -q "
$CLICKHOUSE_CLIENT -q "
drop table if exists many_mutations;
create table many_mutations (x UInt32, y UInt32) engine = MergeTree order by x settings number_of_mutations_to_delay = 0, number_of_mutations_to_throw = 0, max_parts_to_merge_at_once = 1;
insert into many_mutations select number, number + 1 from numbers(2000);
@ -21,7 +21,7 @@ job()
for i in {1..1000}
do
echo "alter table many_mutations delete where y = ${i} * 2 settings mutations_sync = 0;"
done | $CLICKHOUSE_CLIENT --multiquery
done | $CLICKHOUSE_CLIENT
}
job &
@ -48,7 +48,7 @@ job &
wait
# truncate before drop to avoid removing all the mutations (which is slow) in DatabaseCatalog's thread (may affect other tests)
$CLICKHOUSE_CLIENT --multiquery -q "
$CLICKHOUSE_CLIENT -q "
select count() from system.mutations where database = currentDatabase() and table = 'many_mutations' and not is_done;
system start merges many_mutations;
optimize table many_mutations final SETTINGS optimize_throw_if_noop = 1;

View File

@ -5,7 +5,7 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR"/../shell_config.sh
for Engine in Atomic Ordinary; do
$CLICKHOUSE_LOCAL --allow_deprecated_database_ordinary=1 --multiquery --query """
$CLICKHOUSE_LOCAL --allow_deprecated_database_ordinary=1 --query """
CREATE DATABASE foo_$Engine Engine=$Engine;
DROP DATABASE foo_$Engine;
"""

Some files were not shown because too many files have changed in this diff.