ClickHouse/tests/queries/0_stateless/02700_s3_part_INT_MAX.sh
#!/usr/bin/env bash
# Tags: no-parallel, long, no-fasttest
CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CUR_DIR"/../shell_config.sh
# Regression test for a crash when a part exceeds INT_MAX.
#
# NOTE: an .sh test is used instead of .sql because it needs $CLICKHOUSE_DATABASE
# to avoid truncation, since it seems that the version of MinIO used in CI is
# too slow for this.
#
# Unfortunately, the test has to buffer the data in memory.
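#
# The INSERT below generates 2^21 rows of 1024-byte strings, i.e. slightly more
# than 2 GiB of TSV (just above INT_MAX bytes), and s3_max_single_part_upload_size='5Gi'
# keeps the object under the single-part upload threshold, so it is written as one part.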
$CLICKHOUSE_CLIENT --max_memory_usage 16G -m -q "
INSERT INTO FUNCTION s3('http://localhost:11111/test/$CLICKHOUSE_DATABASE/test_INT_MAX.tsv', '', '', 'TSV')
    SELECT repeat('a', 1024) FROM numbers((pow(2, 30) * 2) / 1024)
    SETTINGS s3_max_single_part_upload_size = '5Gi', s3_retry_attempts=5;
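
-- Read the file back to check that the upload produced a valid, readable object.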
SELECT count() FROM s3('http://localhost:11111/test/$CLICKHOUSE_DATABASE/test_INT_MAX.tsv')
    SETTINGS s3_retry_attempts=5;
"