mirror of https://github.com/ClickHouse/ClickHouse.git
Put test files into data_minio instead of generating them in the test
commit 41563e43a8 (parent 03baa19e88)
@@ -1,2 +1,2 @@
 12639441726720293784
-49999995000000
+499999500000
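The two plain integers in this reference file are sums over ClickHouse's numbers() table function, which yields 0..N-1, so the sum is N*(N-1)/2. The change from 49999995000000 to 499999500000 therefore implies the arrow fixture shrank from 10,000,000 rows to 1,000,000 rows (an inference from the sums; the commit does not state the row count). A quick check in clickhouse-local:

select sum(number) from numbers(10000000); -- 49999995000000, the old value
select sum(number) from numbers(1000000);  -- 499999500000, the new value

The first reference line, 12639441726720293784, is the sum of cityHash64(number) over numbers(170000) (the expression from the removed insert below) and is unchanged.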
@@ -3,11 +3,8 @@
--- Reading from s3 a parquet file of size between ~1 MB and ~2 MB was broken at some point.
-insert into function s3(s3_conn, filename='test_02731_parquet.parquet') select cityHash64(number) from numbers(170000) settings s3_truncate_on_insert=1;
-select sum(*) from s3(s3_conn, filename='test_02731_parquet.parquet') settings remote_filesystem_read_method='threadpool', remote_filesystem_read_prefetch=1;
+-- Reading from s3 a parquet file of size between ~1 MB and ~2 MB was broken at some point
+-- (bug in CachedOnDiskReadBufferFromFile).
+select sum(*) from s3(s3_conn, filename='02731.parquet') settings remote_filesystem_read_method='threadpool', remote_filesystem_read_prefetch=1;
 
--- Reading from s3 of arrow files of ~40 MB was broken at some point (but in ParallelReadBuffer).
-insert into function s3(s3_conn, filename='test_02731_arrow.arrow') select * from numbers(10000000) settings s3_truncate_on_insert=1;
-select sum(*) from s3(s3_conn, filename='test_02731_arrow.arrow') settings remote_filesystem_read_method='read', max_download_buffer_size = 10485760;
+-- Reading from s3 of arrow files of ~40 MB (max_download_buffer_size * 4) was broken at some point
+-- (bug in ParallelReadBuffer).
+select sum(*) from s3(s3_conn, filename='02731.arrow') settings remote_filesystem_read_method='read', max_download_buffer_size = 1048576;
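With the insert statements gone, the fixture files must be produced out of band and checked into data_minio. A minimal sketch of how they could be regenerated with clickhouse-local, assuming 02731.parquet keeps the old contents (cityHash64 over 170000 rows, since its reference sum is unchanged) and 02731.arrow holds numbers(1000000) (inferred from the new reference sum); the actual generation commands are not part of this commit:

-- sketch only: regenerate the checked-in fixtures locally
insert into function file('02731.parquet', 'Parquet')
    select cityHash64(number) from numbers(170000);
insert into function file('02731.arrow', 'Arrow')
    select number from numbers(1000000); -- assumed row count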
BIN tests/queries/0_stateless/data_minio/02731.arrow (new file, binary file not shown)
BIN tests/queries/0_stateless/data_minio/02731.parquet (new file, binary file not shown)
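Before the binaries ever reach MinIO, their contents can be sanity-checked against the reference sums locally; a sketch using clickhouse-local's file() function with the repository-relative paths above:

select sum(*) from file('tests/queries/0_stateless/data_minio/02731.parquet', 'Parquet'); -- expect 12639441726720293784
select sum(*) from file('tests/queries/0_stateless/data_minio/02731.arrow', 'Arrow'); -- expect 499999500000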