Put test files into data_minio instead of generating them in the test

Michael Kolupaev 2023-06-14 19:36:35 +00:00
parent 03baa19e88
commit 41563e43a8
4 changed files with 5 additions and 8 deletions


@@ -1,2 +1,2 @@
 12639441726720293784
-49999995000000
+499999500000
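
For reference, these expected values are the sums the test queries compute: the first line is sum(cityHash64(number)) over the 170000 rows the parquet file was built from (unchanged by this commit), and the second line shrinks roughly 100x because the new sum matches numbers(1000000) exactly, whereas the old test generated numbers(10000000). A quick sanity check, assuming the pre-stored files hold the same data as the removed inserts produced:

-- Hedged check of the reference values (assumes the data_minio files match the old inserts):
select sum(cityHash64(number)) from numbers(170000); -- 12639441726720293784 (UInt64 sum, wraps modulo 2^64)
select sum(number) from numbers(1000000);            -- 499999500000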


@@ -3,11 +3,8 @@
 -- Reading from s3 a parquet file of size between ~1 MB and ~2 MB was broken at some point
 -- (bug in CachedOnDiskReadBufferFromFile).
-insert into function s3(s3_conn, filename='test_02731_parquet.parquet') select cityHash64(number) from numbers(170000) settings s3_truncate_on_insert=1;
-select sum(*) from s3(s3_conn, filename='test_02731_parquet.parquet') settings remote_filesystem_read_method='threadpool', remote_filesystem_read_prefetch=1;
+select sum(*) from s3(s3_conn, filename='02731.parquet') settings remote_filesystem_read_method='threadpool', remote_filesystem_read_prefetch=1;
 
--- Reading from s3 of arrow files of ~40 MB was broken at some point (bug in ParallelReadBuffer).
-insert into function s3(s3_conn, filename='test_02731_arrow.arrow') select * from numbers(10000000) settings s3_truncate_on_insert=1;
-select sum(*) from s3(s3_conn, filename='test_02731_arrow.arrow') settings remote_filesystem_read_method='read', max_download_buffer_size = 10485760;
+-- Reading from s3 of arrow files of ~40 MB (max_download_buffer_size * 4) was broken at some point
+-- (bug in ParallelReadBuffer).
+select sum(*) from s3(s3_conn, filename='02731.arrow') settings remote_filesystem_read_method='read', max_download_buffer_size = 1048576;
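
Note the size arithmetic in the new comment: the ParallelReadBuffer bug needed a file of roughly max_download_buffer_size * 4, so lowering the buffer from 10485760 to 1048576 bytes lets a file of a few MB reproduce what previously required ~40 MB. The test no longer generates the files; a hypothetical one-off way to produce equivalent files locally is sketched below (the file paths, the column name, and the use of clickhouse-local are assumptions, not what the commit actually ran):

-- Sketch: regenerate equivalents of the pre-stored files with clickhouse-local.
insert into function file('02731.parquet', 'Parquet', 'x UInt64') select cityHash64(number) from numbers(170000);
insert into function file('02731.arrow', 'Arrow', 'x UInt64') select * from numbers(1000000);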

Binary file not shown.

Binary file not shown.
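
The two binary files not shown are, per the commit title, the pre-stored 02731.parquet and 02731.arrow added under the data_minio test-data directory, which the stateless test environment presumably uploads to its MinIO instance so that the s3(s3_conn, ...) reads above can find them.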