Fix checks

kssenii 2022-05-28 01:17:08 +02:00
parent 0b350ce391
commit b9618ea71c
4 changed files with 14 additions and 13 deletions

@@ -292,7 +292,7 @@ void DiskCache::removeCacheIfExists(const String & path)
cache->removeIfExists(key);
}
}
-catch (const Exception & e)
+catch ([[maybe_unused]] const Exception & e)
{
#ifdef NDEBUG
/// Protect against concurrent file deletion.
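
The attribute added above is a warning fix: the caught exception object is presumably referenced in only one branch of the #ifdef NDEBUG block that follows, so the other build mode would report an unused variable (clang's -Wunused-exception-parameter, for instance) and break a -Werror build. A minimal standalone sketch of the pattern, with illustrative names rather than the actual ClickHouse code:

#include <exception>
#include <iostream>
#include <stdexcept>

void removeQuietly()
{
    try
    {
        throw std::runtime_error("file is already gone");
    }
    catch ([[maybe_unused]] const std::exception & e)  /// without the attribute, one build mode warns
    {
#ifndef NDEBUG
        /// Only debug builds inspect the caught object.
        std::cerr << "removal raced with another writer: " << e.what() << '\n';
#endif
        /// Release builds (NDEBUG defined) swallow the error, leaving e otherwise unused.
    }
}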

@@ -32,6 +32,7 @@ namespace ErrorCodes
extern const int CANNOT_SEEK_THROUGH_FILE;
extern const int CANNOT_USE_CACHE;
extern const int LOGICAL_ERROR;
+extern const int ARGUMENT_OUT_OF_BOUND;
}
CachedReadBufferFromFile::CachedReadBufferFromFile(
@@ -978,7 +979,7 @@ off_t CachedReadBufferFromFile::seek(off_t offset, int whence)
{
if (whence != SEEK_SET && whence != SEEK_CUR)
{
throw Exception("Exptected SEEK_SET or SEEK_CUR as whence", ErrorCodes::ARGUMENT_OUT_OF_BOUND);
throw Exception("Expected SEEK_SET or SEEK_CUR as whence", ErrorCodes::ARGUMENT_OUT_OF_BOUND);
}
if (whence == SEEK_CUR)
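
Both hunks in this file line up with the commit's goal of fixing checks: ClickHouse's convention is that every ErrorCodes::X referenced in a .cpp file must be declared there as extern const int (the values themselves live in Common/ErrorCodes.cpp), and the seek() shown above throws ARGUMENT_OUT_OF_BOUND, hence the added declaration alongside the message-typo fix. The same convention, in the other direction, presumably explains the next file, which drops a declared but no longer used BAD_ARGUMENTS. A rough self-contained sketch of the pattern, with an assumed include path and an illustrative helper function:

#include <cstdio>              /// SEEK_SET, SEEK_CUR
#include <Common/Exception.h>  /// assumed location of DB::Exception

namespace DB
{

namespace ErrorCodes
{
    /// Declared per translation unit; defined centrally in Common/ErrorCodes.cpp.
    extern const int ARGUMENT_OUT_OF_BOUND;
}

/// Illustrative helper, not part of CachedReadBufferFromFile.
void assertSeekWhence(int whence)
{
    if (whence != SEEK_SET && whence != SEEK_CUR)
        throw Exception("Expected SEEK_SET or SEEK_CUR as whence", ErrorCodes::ARGUMENT_OUT_OF_BOUND);
}

}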

@@ -6,11 +6,6 @@
namespace DB
{
-namespace ErrorCodes
-{
-extern const int BAD_ARGUMENTS;
-}
static String getDiskMetadataPath(
const String & name,
const Poco::Util::AbstractConfiguration & config,

@@ -22,6 +22,13 @@
<access_key_id>clickhouse</access_key_id>
<secret_access_key>clickhouse</secret_access_key>
</s3_disk_2>
+<s3_disk_3>
+<type>s3</type>
+<path>s3_disk_3/</path>
+<endpoint>http://localhost:11111/test/00170_test/</endpoint>
+<access_key_id>clickhouse</access_key_id>
+<secret_access_key>clickhouse</secret_access_key>
+</s3_disk_3>
<s3_cache_2>
<type>cache</type>
<disk>s3_disk_2</disk>
@@ -29,12 +36,10 @@
<max_size>22548578304</max_size>
</s3_cache_2>
<s3_cache_3>
-<type>s3</type>
-<endpoint>http://localhost:11111/test/00170_test/</endpoint>
-<access_key_id>clickhouse</access_key_id>
-<secret_access_key>clickhouse</secret_access_key>
-<data_cache_enabled>1</data_cache_enabled>
-<cache_enabled>0</cache_enabled>
+<type>cache</type>
+<disk>s3_disk_3</disk>
+<path>s3_disk_3_cache/</path>
+<max_size>22548578304</max_size>
<data_cache_max_size>22548578304</data_cache_max_size>
<cache_on_write_operations>1</cache_on_write_operations>
<enable_cache_hits_threshold>1</enable_cache_hits_threshold>
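
Taken together, the two hunks in this storage configuration swap the old single-disk form, an s3 disk with caching switched on inline via data_cache_enabled, for the layered form: a plain object disk s3_disk_3 plus a cache disk s3_cache_3 that points at it through <disk>. Reassembled from the visible added and surviving lines only (the closing tag and anything past the hunk are assumed), the resulting definitions would read roughly:

<s3_disk_3>
    <type>s3</type>
    <path>s3_disk_3/</path>
    <endpoint>http://localhost:11111/test/00170_test/</endpoint>
    <access_key_id>clickhouse</access_key_id>
    <secret_access_key>clickhouse</secret_access_key>
</s3_disk_3>
<s3_cache_3>
    <type>cache</type>
    <disk>s3_disk_3</disk>
    <path>s3_disk_3_cache/</path>
    <max_size>22548578304</max_size>
    <data_cache_max_size>22548578304</data_cache_max_size>
    <cache_on_write_operations>1</cache_on_write_operations>
    <enable_cache_hits_threshold>1</enable_cache_hits_threshold>
</s3_cache_3>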