Fix tests for flaky check

Igor Nikonov 2023-04-09 23:40:32 +00:00
parent 1187534545
commit e1fa279c98
2 changed files with 7 additions and 8 deletions


@@ -1,3 +1,3 @@
--- Tags: no-fasttest
+-- Tags: no-fasttest, no-parallel
 insert into table function file('data.jsonl', 'JSONEachRow', 'x UInt32') select * from numbers(10) SETTINGS engine_file_truncate_on_insert=1;
 select * from file('data.jsonl') order by x;
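Context for the tag change: file('data.jsonl', ...) resolves a relative path against the server-wide user_files directory, so every test writing data.jsonl touches the same file on disk; the added no-parallel tag keeps a concurrently running test from truncating it mid-run. A minimal sketch of the collision the tag avoids, assuming a local clickhouse-client and the default user_files location (illustrative only, not part of the commit):

    # Two writers race on the same user_files/data.jsonl; with
    # engine_file_truncate_on_insert=1 each insert first truncates the file,
    # so the surviving contents depend on scheduling.
    clickhouse-client --query "insert into table function file('data.jsonl', 'JSONEachRow', 'x UInt32') select * from numbers(10) SETTINGS engine_file_truncate_on_insert=1" &
    clickhouse-client --query "insert into table function file('data.jsonl', 'JSONEachRow', 'x UInt32') select * from numbers(20) SETTINGS engine_file_truncate_on_insert=1" &
    wait
    clickhouse-client --query "select count() from file('data.jsonl')"   # may print 10 or 20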


@@ -1,16 +1,15 @@
 #!/usr/bin/env bash
+# Tags: no-parallel
 CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # shellcheck source=../shell_config.sh
 . "$CUR_DIR"/../shell_config.sh
-user_files_path=$($CLICKHOUSE_CLIENT --query "select _path,_file from file('nonexist.txt', 'CSV', 'val1 char')" 2>&1 | grep -E '^Code: 107.*FILE_DOESNT_EXIST' | head -1 | awk '{gsub("/nonexist.txt","",$9); print $9}')
+USER_FILES_PATH=$($CLICKHOUSE_CLIENT --query "select _path,_file from file('nonexist.txt', 'CSV', 'val1 char')" 2>&1 | grep -E '^Code: 107.*FILE_DOESNT_EXIST' | head -1 | awk '{gsub("/nonexist.txt","",$9); print $9}')
-UNIQ_DEST_PATH=$user_files_path/test-02455-$RANDOM-$RANDOM
-mkdir -p $UNIQ_DEST_PATH
-cp "$CUR_DIR"/data_csv/10m_rows.csv.xz $UNIQ_DEST_PATH/
+cp "$CUR_DIR"/data_csv/10m_rows.csv.xz $USER_FILES_PATH/
-${CLICKHOUSE_CLIENT} --query="SELECT * FROM file('$UNIQ_DEST_PATH/10m_rows.csv.xz' , 'CSVWithNames') order by identifier, number, name, surname, birthday LIMIT 1 settings max_memory_usage=1000000000"
-${CLICKHOUSE_CLIENT} --query="SELECT * FROM file('$UNIQ_DEST_PATH/10m_rows.csv.xz' , 'CSVWithNames') order by identifier, number, name, surname, birthday LIMIT 1 settings max_memory_usage=100000000"
+${CLICKHOUSE_CLIENT} --query="SELECT * FROM file('10m_rows.csv.xz' , 'CSVWithNames') order by identifier, number, name, surname, birthday LIMIT 1 settings max_memory_usage=1000000000"
+${CLICKHOUSE_CLIENT} --query="SELECT * FROM file('10m_rows.csv.xz' , 'CSVWithNames') order by identifier, number, name, surname, birthday LIMIT 1 settings max_memory_usage=100000000"
-rm -rf $UNIQ_DEST_PATH
+rm $USER_FILES_PATH/10m_rows.csv.xz
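For reference, the kept USER_FILES_PATH line discovers the server's user_files directory by querying a file that does not exist and parsing the resolved path out of the FILE_DOESNT_EXIST error. A standalone sketch of that trick, assuming a locally running server reachable via clickhouse-client (error wording and field position are taken on trust from the script above):

    # The FILE_DOESNT_EXIST error (Code: 107) embeds the absolute path of the missing
    # file; the awk call takes that path field ($9) and strips the trailing
    # /nonexist.txt, leaving the user_files root.
    USER_FILES_PATH=$(clickhouse-client --query "select _path,_file from file('nonexist.txt', 'CSV', 'val1 char')" 2>&1 \
        | grep -E '^Code: 107.*FILE_DOESNT_EXIST' | head -1 \
        | awk '{gsub("/nonexist.txt","",$9); print $9}')
    echo "user_files directory: $USER_FILES_PATH"

With 10m_rows.csv.xz copied straight into that directory, the queries can reference it by bare name, which file() again resolves relative to user_files; the final rm is the matching cleanup.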