diff --git a/cmake/find/blob_storage.cmake b/cmake/find/blob_storage.cmake
index 74a907da7db..ec1b97f4695 100644
--- a/cmake/find/blob_storage.cmake
+++ b/cmake/find/blob_storage.cmake
@@ -1,14 +1,16 @@
option (ENABLE_AZURE_BLOB_STORAGE "Enable Azure blob storage" ${ENABLE_LIBRARIES})
-option(USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY
- "Set to FALSE to use system Azure SDK instead of bundled (OFF currently not implemented)"
- ON)
-
if (ENABLE_AZURE_BLOB_STORAGE)
set(USE_AZURE_BLOB_STORAGE 1)
set(AZURE_BLOB_STORAGE_LIBRARY azure_sdk)
+else()
+ return()
endif()
+option(USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY
+ "Set to FALSE to use system Azure SDK instead of bundled (OFF currently not implemented)"
+ ON)
+
if ((NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/azure/sdk"
OR NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/azure/cmake-modules")
AND USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY)
diff --git a/cmake/find/ccache.cmake b/cmake/find/ccache.cmake
index 95ec3d8a034..9acc0423f67 100644
--- a/cmake/find/ccache.cmake
+++ b/cmake/find/ccache.cmake
@@ -31,6 +31,7 @@ if (CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)
if (CCACHE_VERSION VERSION_GREATER "3.2.0" OR NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
message(STATUS "Using ${CCACHE_FOUND} ${CCACHE_VERSION}")
+ set(LAUNCHER ${CCACHE_FOUND})
# debian (debhelpers) set SOURCE_DATE_EPOCH environment variable, that is
# filled from the debian/changelog or current time.
@@ -39,13 +40,8 @@ if (CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)
# of the manifest, which do not allow to use previous cache,
# - 4.2+ ccache ignores SOURCE_DATE_EPOCH for every file w/o __DATE__/__TIME__
#
- # So for:
- # - 4.2+ does not require any sloppiness
- # - 4.0+ will ignore SOURCE_DATE_EPOCH environment variable.
- if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.2")
- message(STATUS "ccache is 4.2+ no quirks for SOURCE_DATE_EPOCH required")
- set(LAUNCHER ${CCACHE_FOUND})
- elseif (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0")
+ # Unset the SOURCE_DATE_EPOCH environment variable for ccache versions in [4.0, 4.2).
+ if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0" AND CCACHE_VERSION VERSION_LESS "4.2")
message(STATUS "Ignore SOURCE_DATE_EPOCH for ccache")
set(LAUNCHER env -u SOURCE_DATE_EPOCH ${CCACHE_FOUND})
endif()
diff --git a/docs/en/interfaces/http.md b/docs/en/interfaces/http.md
index 38e729fde0b..a49143bf599 100644
--- a/docs/en/interfaces/http.md
+++ b/docs/en/interfaces/http.md
@@ -186,7 +186,7 @@ $ echo "SELECT 1" | gzip -c | \
```
``` bash
-# Receiving compressed data from the server
+# Receiving compressed data from the server and saving it to a gzip archive
$ curl -vsS "http://localhost:8123/?enable_http_compression=1" \
-H 'Accept-Encoding: gzip' --output result.gz -d 'SELECT number FROM system.numbers LIMIT 3'
$ zcat result.gz
@@ -195,6 +195,15 @@ $ zcat result.gz
2
```
+```bash
+# Receiving compressed data from the server and using gunzip to decompress it
+$ curl -sS "http://localhost:8123/?enable_http_compression=1" \
+ -H 'Accept-Encoding: gzip' -d 'SELECT number FROM system.numbers LIMIT 3' | gunzip -
+0
+1
+2
+```
+
## Default Database {#default-database}
You can use the ‘database’ URL parameter or the ‘X-ClickHouse-Database’ header to specify the default database.
diff --git a/docs/en/introduction/adopters.md b/docs/en/introduction/adopters.md
index 87c5a6f7aec..c2660653907 100644
--- a/docs/en/introduction/adopters.md
+++ b/docs/en/introduction/adopters.md
@@ -60,8 +60,10 @@ toc_title: Adopters
| Exness | Trading | Metrics, Logging | — | — | [Talk in Russian, May 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| EventBunker.io | Serverless Data Processing | — | — | — | [Tweet, April 2021](https://twitter.com/Halil_D_/status/1379839133472985091) |
| FastNetMon | DDoS Protection | Main Product | | — | [Official website](https://fastnetmon.com/docs-fnm-advanced/fastnetmon-advanced-traffic-persistency/) |
+| Firebolt | Analytics | Main product | — | — | [YouTube Tech Talk](https://www.youtube.com/watch?v=9rW9uEJ15tU) |
| Flipkart | e-Commerce | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=239) |
| FunCorp | Games | | — | 14 bn records/day as of Jan 2021 | [Article](https://www.altinity.com/blog/migrating-from-redshift-to-clickhouse) |
+| Futurra Group | Analytics | — | — | — | [Article in Russian, December 2021](https://dou.ua/forums/topic/35587/) |
| Geniee | Ad network | Main product | — | — | [Blog post in Japanese, July 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| Genotek | Bioinformatics | Main product | — | — | [Video, August 2020](https://youtu.be/v3KyZbz9lEE) |
| Gigapipe | Managed ClickHouse | Main product | — | — | [Official website](https://gigapipe.com/) |
@@ -70,6 +72,7 @@ toc_title: Adopters
| Grouparoo | Data Warehouse Integrations | Main product | — | — | [Official Website, November 2021](https://www.grouparoo.com/integrations) |
| HUYA | Video Streaming | Analytics | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
| Hydrolix | Cloud data platform | Main product | — | — | [Documentation](https://docs.hydrolix.io/guide/query) |
+| Hystax | Cloud Operations | Observability Analytics | — | — | [Blog](https://hystax.com/clickhouse-for-real-time-cost-saving-analytics-how-to-stop-hammering-screws-and-use-an-electric-screwdriver/) |
| ICA | FinTech | Risk Management | — | — | [Blog Post in English, Sep 2020](https://altinity.com/blog/clickhouse-vs-redshift-performance-for-fintech-risk-management?utm_campaign=ClickHouse%20vs%20RedShift&utm_content=143520807&utm_medium=social&utm_source=twitter&hss_channel=tw-3894792263) |
| Idealista | Real Estate | Analytics | — | — | [Blog Post in English, April 2019](https://clickhouse.com/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| Infobaleen | AI markting tool | Analytics | — | — | [Official site](https://infobaleen.com) |
@@ -81,14 +84,18 @@ toc_title: Adopters
| Ippon Technologies | Technology Consulting | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=205) |
| Ivi | Online Cinema | Analytics, Monitoring | — | — | [Article in Russian, Jan 2018](https://habr.com/en/company/ivi/blog/347408/) |
| Jinshuju 金数据 | BI Analytics | Main product | — | — | [Slides in Chinese, October 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/3.%20金数据数据架构调整方案Public.pdf) |
-| Jitsu | Cloud Software | Data Pipeline | — | — | [Documentation](https://jitsu.com/docs/destinations-configuration/clickhouse-destination), [Hacker News](https://news.ycombinator.com/item?id=29106082) |
+| Jitsu | Cloud Software | Data Pipeline | — | — | [Documentation](https://jitsu.com/docs/destinations-configuration/clickhouse-destination), [Hacker News post](https://news.ycombinator.com/item?id=29106082) |
+| JuiceFS | Storage | Shopping Cart | — | — | [Blog](https://juicefs.com/blog/en/posts/shopee-clickhouse-with-juicefs/) |
| kakaocorp | Internet company | — | — | — | [if(kakao)2020](https://tv.kakao.com/channel/3693125/cliplink/414129353), [if(kakao)2021](https://if.kakao.com/session/24) |
| Kodiak Data | Clouds | Main product | — | — | [Slides in Engish, April 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup13/kodiak_data.pdf) |
| Kontur | Software Development | Metrics | — | — | [Talk in Russian, November 2018](https://www.youtube.com/watch?v=U4u4Bd0FtrY) |
| Kuaishou | Video | — | — | — | [ClickHouse Meetup, October 2018](https://clickhouse.com/blog/en/2018/clickhouse-community-meetup-in-beijing-on-october-28-2018/) |
| KGK Global | Vehicle monitoring | — | — | — | [Press release, June 2021](https://zoom.cnews.ru/news/item/530921) |
+| LANCOM Systems | Network Solutions | Traffic analysis | — | — | [ClickHouse Operator for Kubernetes](https://www.lancom-systems.com/), [Hacker News post](https://news.ycombinator.com/item?id=29413660) |
| Lawrence Berkeley National Laboratory | Research | Traffic analysis | 5 servers | 55 TiB | [Slides in English, April 2019](https://www.smitasin.com/presentations/2019-04-17_DOE-NSM.pdf) |
+| Lever | Talent Management | Recruiting | — | — | [Hacker News post](https://news.ycombinator.com/item?id=29558544) |
| LifeStreet | Ad network | Main product | 75 servers (3 replicas) | 5.27 PiB | [Blog post in Russian, February 2017](https://habr.com/en/post/322620/) |
+| Lookforsale | E-Commerce | — | — | — | [Job Posting, December 2021](https://telegram.me/javascript_jobs/587318) |
| Mail.ru Cloud Solutions | Cloud services | Main product | — | — | [Article in Russian](https://mcs.mail.ru/help/db-create/clickhouse#) |
| MAXILECT | Ad Tech, Blockchain, ML, AI | — | — | — | [Job advertisement, 2021](https://www.linkedin.com/feed/update/urn:li:activity:6780842017229430784/) |
| Marilyn | Advertising | Statistics | — | — | [Talk in Russian, June 2017](https://www.youtube.com/watch?v=iXlIgx2khwc) |
@@ -106,6 +113,7 @@ toc_title: Adopters
| Ok.ru | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, October 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) |
| Omnicomm | Transportation Monitoring | — | — | — | [Facebook post, October 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) |
| OneAPM | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) |
+| Opensee | Financial Analytics | Main product | — | — | [Blog](https://opensee.io/news/from-moscow-to-wall-street-the-remarkable-journey-of-clickhouse/) |
| Open Targets | Genome Research | Genome Search | — | — | [Tweet, October 2021](https://twitter.com/OpenTargets/status/1452570865342758913?s=20), [Blog](https://blog.opentargets.org/graphql/) |
| OZON | E-commerce | — | — | — | [Official website](https://job.ozon.ru/vacancy/razrabotchik-clickhouse-ekspluatatsiya-40991870/) |
| Panelbear | Analytics | Monitoring and Analytics | — | — | [Tech Stack, November 2020](https://panelbear.com/blog/tech-stack/) |
@@ -118,6 +126,7 @@ toc_title: Adopters
| PRANA | Industrial predictive analytics | Main product | — | — | [News (russian), Feb 2021](https://habr.com/en/news/t/541392/) |
| QINGCLOUD | Cloud services | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/4.%20Cloud%20%2B%20TSDB%20for%20ClickHouse%20张健%20QingCloud.pdf) |
| Qrator | DDoS protection | Main product | — | — | [Blog Post, March 2019](https://blog.qrator.net/en/clickhouse-ddos-mitigation_37/) |
+| R-Vision | Information Security | — | — | — | [Article in Russian, December 2021](https://www.anti-malware.ru/reviews/R-Vision-SENSE-15) |
| Raiffeisenbank | Banking | Analytics | — | — | [Lecture in Russian, December 2020](https://cs.hse.ru/announcements/421965599.html) |
| Rambler | Internet services | Analytics | — | — | [Talk in Russian, April 2018](https://medium.com/@ramblertop/разработка-api-clickhouse-для-рамблер-топ-100-f4c7e56f3141) |
| Replica | Urban Planning | Analytics | — | — | [Job advertisement](https://boards.greenhouse.io/replica/jobs/5547732002?gh_jid=5547732002) |
@@ -153,6 +162,7 @@ toc_title: Adopters
| Tinybird | Real-time Data Products | Data processing | — | — | [Official website](https://www.tinybird.co/) |
| Traffic Stars | AD network | — | 300 servers in Europe/US | 1.8 PiB, 700 000 insert rps (as of 2021) | [Slides in Russian, May 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup15/lightning/ninja.pdf) |
| Uber | Taxi | Logging | — | — | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/uber.pdf) |
+| UseTech | Software Development | — | — | — | [Job Posting, December 2021](https://vk.com/wall136266658_2418) |
| UTMSTAT | Analytics | Main product | — | — | [Blog post, June 2020](https://vc.ru/tribuna/133956-striming-dannyh-iz-servisa-skvoznoy-analitiki-v-clickhouse) |
| Vercel | Traffic and Performance Analytics | — | — | — | Direct reference, October 2021 |
| VKontakte | Social Network | Statistics, Logging | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/3_vk.pdf) |
@@ -168,7 +178,8 @@ toc_title: Adopters
| Yandex Cloud | Public Cloud | Main product | — | — | [Talk in Russian, December 2019](https://www.youtube.com/watch?v=pgnak9e_E0o) |
| Yandex DataLens | Business Intelligence | Main product | — | — | [Slides in Russian, December 2019](https://presentations.clickhouse.com/meetup38/datalens.pdf) |
| Yandex Market | e-Commerce | Metrics, Logging | — | — | [Talk in Russian, January 2019](https://youtu.be/_l1qP0DyBcA?t=478) |
-| Yandex Metrica | Web analytics | Main product | 630 servers in one cluster, 360 servers in another cluster, 1862 servers in one department | 133 PiB / 8.31 PiB / 120 trillion records | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/introduction/#13) |
+| Yandex Metrica | Web analytics | Main product | 630 servers in one cluster, 360 servers in another cluster, 1862 servers in one department | 133 PiB / 8.31 PiB / 120 trillion records | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/introduction/#13) |
+| Yellowfin | Analytics | Main product | — | — | [Integration](https://www.yellowfinbi.com/campaign/yellowfin-9-whats-new#el-30219e0e) |
| Yotascale | Cloud | Data pipeline | — | 2 bn records/day | [LinkedIn (Accomplishments)](https://www.linkedin.com/in/adilsaleem/) |
| Your Analytics | Product Analytics | Main Product | — | - | [Tweet, November 2021](https://twitter.com/mikenikles/status/1459737241165565953) |
| Zagrava Trading | — | — | — | — | [Job offer, May 2021](https://twitter.com/datastackjobs/status/1394707267082063874) |
@@ -178,9 +189,5 @@ toc_title: Adopters
| Цифровой Рабочий | Industrial IoT, Analytics | — | — | — | [Blog post in Russian, March 2021](https://habr.com/en/company/croc/blog/548018/) |
| ООО «МПЗ Богородский» | Agriculture | — | — | — | [Article in Russian, November 2020](https://cloud.yandex.ru/cases/okraina) |
| ДомКлик | Real Estate | — | — | — | [Article in Russian, October 2021](https://habr.com/ru/company/domclick/blog/585936/) |
-| Futurra Group | Analytics | — | — | — | [Article in Russian, December 2021](https://dou.ua/forums/topic/35587/) |
-| UseTech | Software Development | — | — | — | [Job Posting, December 2021](https://vk.com/wall136266658_2418) |
-| Lookforsale | E-Commerce | — | — | — | [Job Posting, December 2021](https://telegram.me/javascript_jobs/587318) |
-| R-Vision | Information Security | — | — | — | [Article in Russian, December 2021](https://www.anti-malware.ru/reviews/R-Vision-SENSE-15) |
[Original article](https://clickhouse.com/docs/en/introduction/adopters/)
diff --git a/docs/ru/sql-reference/functions/nlp-functions.md b/docs/ru/sql-reference/functions/nlp-functions.md
index 250403ab127..992a7d6ccf3 100644
--- a/docs/ru/sql-reference/functions/nlp-functions.md
+++ b/docs/ru/sql-reference/functions/nlp-functions.md
@@ -3,10 +3,10 @@ toc_priority: 67
toc_title: NLP
---
-# [экспериментально] Функции для работы с ествественным языком {#nlp-functions}
+# [экспериментально] Функции для работы с естественным языком {#nlp-functions}
!!! warning "Предупреждение"
- Сейчас использование функций для работы с ествественным языком является экспериментальной возможностью. Чтобы использовать данные функции, включите настройку `allow_experimental_nlp_functions = 1`.
+ Сейчас использование функций для работы с естественным языком является экспериментальной возможностью. Чтобы использовать данные функции, включите настройку `allow_experimental_nlp_functions = 1`.
## stem {#stem}
@@ -84,7 +84,7 @@ SELECT lemmatize('en', 'wolves');
Находит синонимы к заданному слову. Представлены два типа расширений словарей: `plain` и `wordnet`.
-Для работы расширения типа `plain` необходимо указать путь до простого текстового файла, где каждая строка соотвествует одному набору синонимов. Слова в данной строке должны быть разделены с помощью пробела или знака табуляции.
+Для работы расширения типа `plain` необходимо указать путь до простого текстового файла, где каждая строка соответствует одному набору синонимов. Слова в данной строке должны быть разделены с помощью пробела или знака табуляции.
Для работы расширения типа `plain` необходимо указать путь до WordNet тезауруса. Тезаурус должен содержать WordNet sense index.
diff --git a/programs/benchmark/Benchmark.cpp b/programs/benchmark/Benchmark.cpp
index 1c276a83768..35ffb97b8e2 100644
--- a/programs/benchmark/Benchmark.cpp
+++ b/programs/benchmark/Benchmark.cpp
@@ -342,6 +342,9 @@ private:
}
}
+ /// Now we don't block the Ctrl+C signal, so a second signal will terminate the program without waiting.
+ interrupt_listener.unblock();
+
pool.wait();
total_watch.stop();
@@ -586,7 +589,6 @@ public:
#ifndef __clang__
#pragma GCC optimize("-fno-var-tracking-assignments")
#endif
-#pragma GCC diagnostic ignored "-Wmissing-declarations"
int mainEntryClickHouseBenchmark(int argc, char ** argv)
{
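The hunk above unblocks Ctrl+C once all queries have been scheduled, so that a second interrupt terminates clickhouse-benchmark right away instead of waiting for the worker pool. Below is a minimal, generic POSIX sketch of that block/unblock idea; it uses plain sigprocmask rather than ClickHouse's InterruptListener, and the sleep calls are stand-ins for scheduling work and waiting on the pool.

```cpp
#include <signal.h>   // POSIX: sigemptyset, sigaddset, sigprocmask
#include <unistd.h>   // sleep
#include <cstdio>

int main()
{
    sigset_t sigint_set;
    sigemptyset(&sigint_set);
    sigaddset(&sigint_set, SIGINT);

    // Block SIGINT: Ctrl+C is held pending instead of terminating the process.
    sigprocmask(SIG_BLOCK, &sigint_set, nullptr);
    std::puts("scheduling work; Ctrl+C is deferred for 3 seconds");
    sleep(3); // stand-in for scheduling the queries

    // Unblock SIGINT: a pending or future Ctrl+C now takes effect immediately.
    sigprocmask(SIG_UNBLOCK, &sigint_set, nullptr);
    std::puts("SIGINT unblocked; Ctrl+C now terminates immediately");
    sleep(3); // stand-in for pool.wait()
    return 0;
}
```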
diff --git a/src/Functions/FunctionsConversion.h b/src/Functions/FunctionsConversion.h
index 8018fa8e726..62e62b5f5dc 100644
--- a/src/Functions/FunctionsConversion.h
+++ b/src/Functions/FunctionsConversion.h
@@ -1835,6 +1835,8 @@ public:
size_t getNumberOfArguments() const override { return 0; }
bool useDefaultImplementationForConstants() const override { return true; }
+ bool canBeExecutedOnDefaultArguments() const override { return false; }
+
ColumnNumbers getArgumentsThatAreAlwaysConstant() const override { return {1}; }
DataTypePtr getReturnTypeImpl(const ColumnsWithTypeAndName & arguments) const override
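Returning false from canBeExecutedOnDefaultArguments() here appears to prevent these conversion functions from being run over the unused default value of a LowCardinality dictionary, where a throwing parser such as parseDateTimeBestEffort would fail on an empty string; the new 02155 test at the end of this diff exercises exactly that case. The sketch below is only a rough standalone illustration under that assumption: the hand-rolled dictionary stands in for LowCardinality, and parseStrict stands in for a throwing conversion.

```cpp
#include <cstddef>
#include <cstdio>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical stand-in for a conversion that throws on bad input,
// the way parseDateTimeBestEffort throws on an empty string.
static int parseStrict(const std::string & s)
{
    if (s.empty())
        throw std::runtime_error("cannot parse empty string");
    return std::stoi(s);
}

int main()
{
    // Dictionary-encoded column: slot 0 holds a default value no row references.
    std::vector<std::string> dictionary = {"", "2016", "2021"};
    std::vector<std::size_t> indexes = {1, 2, 1};

    // Executing per referenced row succeeds (what forbidding execution
    // on default arguments effectively falls back to).
    for (std::size_t idx : indexes)
        std::printf("%d\n", parseStrict(dictionary[idx]));

    // Executing over the whole dictionary would hit the default "" and throw.
    try
    {
        for (const auto & value : dictionary)
            parseStrict(value);
    }
    catch (const std::exception & e)
    {
        std::printf("dictionary-wide execution failed: %s\n", e.what());
    }
    return 0;
}
```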
diff --git a/src/IO/BrotliReadBuffer.cpp b/src/IO/BrotliReadBuffer.cpp
index b66bbf45054..77069746153 100644
--- a/src/IO/BrotliReadBuffer.cpp
+++ b/src/IO/BrotliReadBuffer.cpp
@@ -39,7 +39,7 @@ BrotliReadBuffer::BrotliReadBuffer(std::unique_ptr<ReadBuffer> in_, size_t buf_s
, in_data(nullptr)
, out_capacity(0)
, out_data(nullptr)
- , eof(false)
+ , eof_flag(false)
{
}
@@ -47,7 +47,7 @@ BrotliReadBuffer::~BrotliReadBuffer() = default;
bool BrotliReadBuffer::nextImpl()
{
- if (eof)
+ if (eof_flag)
return false;
if (!in_available)
@@ -74,7 +74,7 @@ bool BrotliReadBuffer::nextImpl()
{
if (in->eof())
{
- eof = true;
+ eof_flag = true;
return !working_buffer.empty();
}
else
diff --git a/src/IO/BrotliReadBuffer.h b/src/IO/BrotliReadBuffer.h
index 0fa999d1de5..44a7dc7ddbd 100644
--- a/src/IO/BrotliReadBuffer.h
+++ b/src/IO/BrotliReadBuffer.h
@@ -32,7 +32,7 @@ private:
size_t out_capacity;
uint8_t * out_data;
- bool eof;
+ bool eof_flag;
};
}
diff --git a/src/IO/Bzip2ReadBuffer.cpp b/src/IO/Bzip2ReadBuffer.cpp
index df9a8d5b369..c2060612757 100644
--- a/src/IO/Bzip2ReadBuffer.cpp
+++ b/src/IO/Bzip2ReadBuffer.cpp
@@ -42,7 +42,7 @@ Bzip2ReadBuffer::Bzip2ReadBuffer(std::unique_ptr<ReadBuffer> in_, size_t buf_siz
: BufferWithOwnMemory<ReadBuffer>(buf_size, existing_memory, alignment)
, in(std::move(in_))
, bz(std::make_unique<Bzip2StateWrapper>())
- , eof(false)
+ , eof_flag(false)
{
}
@@ -50,7 +50,7 @@ Bzip2ReadBuffer::~Bzip2ReadBuffer() = default;
bool Bzip2ReadBuffer::nextImpl()
{
- if (eof)
+ if (eof_flag)
return false;
if (!bz->stream.avail_in)
@@ -72,7 +72,7 @@ bool Bzip2ReadBuffer::nextImpl()
{
if (in->eof())
{
- eof = true;
+ eof_flag = true;
return !working_buffer.empty();
}
else
@@ -91,7 +91,7 @@ bool Bzip2ReadBuffer::nextImpl()
if (in->eof())
{
- eof = true;
+ eof_flag = true;
throw Exception(ErrorCodes::UNEXPECTED_END_OF_FILE, "Unexpected end of bzip2 archive");
}
diff --git a/src/IO/Bzip2ReadBuffer.h b/src/IO/Bzip2ReadBuffer.h
index dc113800683..de1e61ee388 100644
--- a/src/IO/Bzip2ReadBuffer.h
+++ b/src/IO/Bzip2ReadBuffer.h
@@ -26,7 +26,7 @@ private:
class Bzip2StateWrapper;
std::unique_ptr<Bzip2StateWrapper> bz;
- bool eof;
+ bool eof_flag;
};
}
diff --git a/src/IO/LZMAInflatingReadBuffer.cpp b/src/IO/LZMAInflatingReadBuffer.cpp
index f2df6bdca6a..80da7421fc3 100644
--- a/src/IO/LZMAInflatingReadBuffer.cpp
+++ b/src/IO/LZMAInflatingReadBuffer.cpp
@@ -7,7 +7,7 @@ namespace ErrorCodes
extern const int LZMA_STREAM_DECODER_FAILED;
}
LZMAInflatingReadBuffer::LZMAInflatingReadBuffer(std::unique_ptr<ReadBuffer> in_, size_t buf_size, char * existing_memory, size_t alignment)
- : BufferWithOwnMemory<ReadBuffer>(buf_size, existing_memory, alignment), in(std::move(in_)), eof(false)
+ : BufferWithOwnMemory<ReadBuffer>(buf_size, existing_memory, alignment), in(std::move(in_)), eof_flag(false)
{
lstr = LZMA_STREAM_INIT;
lstr.allocator = nullptr;
@@ -36,7 +36,7 @@ LZMAInflatingReadBuffer::~LZMAInflatingReadBuffer()
bool LZMAInflatingReadBuffer::nextImpl()
{
- if (eof)
+ if (eof_flag)
return false;
lzma_action action = LZMA_RUN;
@@ -64,7 +64,7 @@ bool LZMAInflatingReadBuffer::nextImpl()
{
if (in->eof())
{
- eof = true;
+ eof_flag = true;
return !working_buffer.empty();
}
else
diff --git a/src/IO/LZMAInflatingReadBuffer.h b/src/IO/LZMAInflatingReadBuffer.h
index 18922f64516..2d676eeeeb3 100644
--- a/src/IO/LZMAInflatingReadBuffer.h
+++ b/src/IO/LZMAInflatingReadBuffer.h
@@ -25,7 +25,7 @@ private:
std::unique_ptr<ReadBuffer> in;
lzma_stream lstr;
- bool eof;
+ bool eof_flag;
};
}
diff --git a/src/IO/Lz4InflatingReadBuffer.cpp b/src/IO/Lz4InflatingReadBuffer.cpp
index 22bce94cad2..61e912d440c 100644
--- a/src/IO/Lz4InflatingReadBuffer.cpp
+++ b/src/IO/Lz4InflatingReadBuffer.cpp
@@ -32,7 +32,7 @@ Lz4InflatingReadBuffer::~Lz4InflatingReadBuffer()
bool Lz4InflatingReadBuffer::nextImpl()
{
- if (eof)
+ if (eof_flag)
return false;
if (!in_available)
@@ -66,7 +66,7 @@ bool Lz4InflatingReadBuffer::nextImpl()
if (in->eof())
{
- eof = true;
+ eof_flag = true;
return !working_buffer.empty();
}
diff --git a/src/IO/Lz4InflatingReadBuffer.h b/src/IO/Lz4InflatingReadBuffer.h
index 0462d85adf7..d4d81f8765c 100644
--- a/src/IO/Lz4InflatingReadBuffer.h
+++ b/src/IO/Lz4InflatingReadBuffer.h
@@ -35,7 +35,7 @@ private:
size_t in_available;
size_t out_available;
- bool eof = false;
+ bool eof_flag = false;
};
}
diff --git a/src/IO/ZlibInflatingReadBuffer.cpp b/src/IO/ZlibInflatingReadBuffer.cpp
index 472399dea3d..28426e920ef 100644
--- a/src/IO/ZlibInflatingReadBuffer.cpp
+++ b/src/IO/ZlibInflatingReadBuffer.cpp
@@ -16,7 +16,7 @@ ZlibInflatingReadBuffer::ZlibInflatingReadBuffer(
size_t alignment)
: BufferWithOwnMemory<ReadBuffer>(buf_size, existing_memory, alignment)
, in(std::move(in_))
- , eof(false)
+ , eof_flag(false)
{
zstr.zalloc = nullptr;
zstr.zfree = nullptr;
@@ -54,7 +54,7 @@ bool ZlibInflatingReadBuffer::nextImpl()
do
{
/// if we already found eof, we shouldn't do anything
- if (eof)
+ if (eof_flag)
return false;
/// if there is no available bytes in zstr, move ptr to next available data
@@ -83,7 +83,7 @@ bool ZlibInflatingReadBuffer::nextImpl()
/// * false if there is no data in working buffer
if (in->eof())
{
- eof = true;
+ eof_flag = true;
return !working_buffer.empty();
}
/// If it is not end of file, we need to reset zstr and return true, because we still have some data to read
diff --git a/src/IO/ZlibInflatingReadBuffer.h b/src/IO/ZlibInflatingReadBuffer.h
index b8c141e9b9b..905ab0cd3fc 100644
--- a/src/IO/ZlibInflatingReadBuffer.h
+++ b/src/IO/ZlibInflatingReadBuffer.h
@@ -33,7 +33,7 @@ private:
std::unique_ptr<ReadBuffer> in;
z_stream zstr;
- bool eof;
+ bool eof_flag;
};
}
diff --git a/src/IO/ZstdInflatingReadBuffer.cpp b/src/IO/ZstdInflatingReadBuffer.cpp
index ce89f09f955..6f244dc5a75 100644
--- a/src/IO/ZstdInflatingReadBuffer.cpp
+++ b/src/IO/ZstdInflatingReadBuffer.cpp
@@ -31,7 +31,7 @@ bool ZstdInflatingReadBuffer::nextImpl()
do
{
// If it is known that end of file was reached, return false
- if (eof)
+ if (eof_flag)
return false;
/// If end was reached, get next part
@@ -64,7 +64,7 @@ bool ZstdInflatingReadBuffer::nextImpl()
/// If end of file is reached, fill eof variable and return true if there is some data in buffer, otherwise return false
if (in->eof())
{
- eof = true;
+ eof_flag = true;
return !working_buffer.empty();
}
/// It is possible, that input buffer is not at eof yet, but nothing was decompressed in current iteration.
diff --git a/src/IO/ZstdInflatingReadBuffer.h b/src/IO/ZstdInflatingReadBuffer.h
index e6e2dad0ad5..ec80b860e0e 100644
--- a/src/IO/ZstdInflatingReadBuffer.h
+++ b/src/IO/ZstdInflatingReadBuffer.h
@@ -31,7 +31,7 @@ private:
ZSTD_DCtx * dctx;
ZSTD_inBuffer input;
ZSTD_outBuffer output;
- bool eof = false;
+ bool eof_flag = false;
};
}
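All of the decompressing read buffers above share the same nextImpl() control flow, and the rename from eof to eof_flag presumably avoids confusion with the eof() member inherited from ReadBuffer. The following condensed sketch is hypothetical (step() and the class itself are stand-ins, not the ClickHouse classes); it only illustrates why the member is a sticky flag that outlives the underlying stream.

```cpp
#include <cstdio>

class InflatingReadBufferSketch
{
public:
    /// Mirrors the nextImpl() pattern: once the compressed stream has ended,
    /// remember that in eof_flag and flush whatever the last step produced.
    bool next()
    {
        if (eof_flag)
            return false;

        bool produced_data = step(); // stand-in for one Brotli/zlib/zstd/... decompression step

        if (source_exhausted)
        {
            eof_flag = true;         // sticky flag, checked first on every later call
            return produced_data;    // analogous to `return !working_buffer.empty();`
        }
        return true;
    }

private:
    bool step()
    {
        ++steps;
        if (steps >= 3)              // pretend the source runs dry after three steps
            source_exhausted = true;
        return true;                 // pretend each step produced some output
    }

    int steps = 0;
    bool source_exhausted = false;
    bool eof_flag = false;           // named eof_flag, mirroring the rename above
};

int main()
{
    InflatingReadBufferSketch buf;
    while (buf.next())
        std::puts("got a chunk");
    std::puts("done");
    return 0;
}
```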
diff --git a/tests/queries/0_stateless/02155_parse_date_lowcard_default_throw.reference b/tests/queries/0_stateless/02155_parse_date_lowcard_default_throw.reference
new file mode 100644
index 00000000000..e599dcc71e5
--- /dev/null
+++ b/tests/queries/0_stateless/02155_parse_date_lowcard_default_throw.reference
@@ -0,0 +1 @@
+2016-07-15 00:00:00
diff --git a/tests/queries/0_stateless/02155_parse_date_lowcard_default_throw.sql b/tests/queries/0_stateless/02155_parse_date_lowcard_default_throw.sql
new file mode 100644
index 00000000000..703cf1fed7a
--- /dev/null
+++ b/tests/queries/0_stateless/02155_parse_date_lowcard_default_throw.sql
@@ -0,0 +1 @@
+SELECT parseDateTimeBestEffort(toLowCardinality(materialize('15-JUL-16')));