From e85f1f348d4f4f5e14e5423cc6d640d24e41b799 Mon Sep 17 00:00:00 2001 From: Vitaly Baranov Date: Thu, 24 Oct 2024 21:35:53 +0200 Subject: [PATCH] Fix showing error message in ReadBufferFromS3 when retrying. --- src/IO/ReadBufferFromS3.cpp | 17 +++++++++-------- src/IO/ReadBufferFromS3.h | 2 +- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/IO/ReadBufferFromS3.cpp b/src/IO/ReadBufferFromS3.cpp index e421753e823..dc2a567e4f6 100644 --- a/src/IO/ReadBufferFromS3.cpp +++ b/src/IO/ReadBufferFromS3.cpp @@ -138,9 +138,9 @@ bool ReadBufferFromS3::nextImpl() next_result = impl->next(); break; } - catch (Poco::Exception & e) + catch (...) { - if (!processException(e, getPosition(), attempt) || last_attempt) + if (!processException(getPosition(), attempt) || last_attempt) throw; /// Pause before next attempt. @@ -202,9 +202,9 @@ size_t ReadBufferFromS3::readBigAt(char * to, size_t n, size_t range_begin, cons /// Read remaining bytes after the end of the payload istr.ignore(INT64_MAX); } - catch (Poco::Exception & e) + catch (...) { - if (!processException(e, range_begin, attempt) || last_attempt) + if (!processException(range_begin, attempt) || last_attempt) throw; sleepForMilliseconds(sleep_time_with_backoff_milliseconds); @@ -219,7 +219,7 @@ size_t ReadBufferFromS3::readBigAt(char * to, size_t n, size_t range_begin, cons return initial_n; } -bool ReadBufferFromS3::processException(Poco::Exception & e, size_t read_offset, size_t attempt) const +bool ReadBufferFromS3::processException(size_t read_offset, size_t attempt) const { ProfileEvents::increment(ProfileEvents::ReadBufferFromS3RequestsErrors, 1); @@ -227,10 +227,11 @@ bool ReadBufferFromS3::processException(Poco::Exception & e, size_t read_offset, log, "Caught exception while reading S3 object. Bucket: {}, Key: {}, Version: {}, Offset: {}, " "Attempt: {}/{}, Message: {}", - bucket, key, version_id.empty() ? 
"Latest" : version_id, read_offset, attempt, request_settings.max_single_read_retries, e.message()); + bucket, key, version_id.empty() ? "Latest" : version_id, read_offset, attempt, request_settings.max_single_read_retries, + getCurrentExceptionMessage(/* with_stacktrace = */ false)); - if (auto * s3_exception = dynamic_cast<S3Exception *>(&e)) + if (auto * s3_exception = exception_cast<S3Exception>(std::current_exception())) { /// It doesn't make sense to retry Access Denied or No Such Key if (!s3_exception->isRetryableError()) @@ -241,7 +242,7 @@ } /// It doesn't make sense to retry allocator errors - if (e.code() == ErrorCodes::CANNOT_ALLOCATE_MEMORY) + if (getCurrentExceptionCode() == ErrorCodes::CANNOT_ALLOCATE_MEMORY) { tryLogCurrentException(log); return false; diff --git a/src/IO/ReadBufferFromS3.h b/src/IO/ReadBufferFromS3.h index ff04f78ce7b..46f1af4daec 100644 --- a/src/IO/ReadBufferFromS3.h +++ b/src/IO/ReadBufferFromS3.h @@ -86,7 +86,7 @@ private: /// Call inside catch() block if GetObject fails. Bumps metrics, logs the error. /// Returns true if the error looks retriable. - bool processException(Poco::Exception & e, size_t read_offset, size_t attempt) const; + bool processException(size_t read_offset, size_t attempt) const; Aws::S3::Model::GetObjectResult sendRequest(size_t attempt, size_t range_begin, std::optional<size_t> range_end_incl) const;