Merge branch 'master' into add-ext-dict-redis

# Conflicts:
#	dbms/tests/external_dictionaries/generate_and_test.py
#	dbms/tests/external_dictionaries/run.sh

Commit 1e014060c9
.gitmodules (vendored): 14 changed lines
@@ -48,10 +48,16 @@
	url = https://github.com/ClickHouse-Extras/protobuf.git
[submodule "contrib/boost"]
	path = contrib/boost
-	url = https://github.com/ClickHouse-Extras/boost-extra.git
+	url = https://github.com/ClickHouse-Extras/boost.git
[submodule "contrib/base64"]
	path = contrib/base64
	url = https://github.com/aklomp/base64.git
+[submodule "contrib/arrow"]
+	path = contrib/arrow
+	url = https://github.com/apache/arrow
+[submodule "contrib/thrift"]
+	path = contrib/thrift
+	url = https://github.com/apache/thrift.git
[submodule "contrib/libhdfs3"]
	path = contrib/libhdfs3
	url = https://github.com/ClickHouse-Extras/libhdfs3.git
@@ -61,6 +67,12 @@
[submodule "contrib/libgsasl"]
	path = contrib/libgsasl
	url = https://github.com/ClickHouse-Extras/libgsasl.git
+[submodule "contrib/snappy"]
+	path = contrib/snappy
+	url = https://github.com/google/snappy
[submodule "contrib/cppkafka"]
	path = contrib/cppkafka
	url = https://github.com/ClickHouse-Extras/cppkafka.git
+[submodule "contrib/brotli"]
+	path = contrib/brotli
+	url = https://github.com/google/brotli.git
CHANGELOG.md: 267 changed lines

@@ -1,3 +1,268 @@
## ClickHouse release 19.4.0.49, 2019-03-09

### New Features
* Added full support for `Protobuf` format (input and output, nested data structures). [#4174](https://github.com/yandex/ClickHouse/pull/4174) [#4493](https://github.com/yandex/ClickHouse/pull/4493) ([Vitaly Baranov](https://github.com/vitlibar))
* Added bitmap functions with Roaring Bitmaps. [#4207](https://github.com/yandex/ClickHouse/pull/4207) ([Andy Yang](https://github.com/andyyzh)) [#4568](https://github.com/yandex/ClickHouse/pull/4568) ([Vitaly Baranov](https://github.com/vitlibar))
* Parquet format support. [#4448](https://github.com/yandex/ClickHouse/pull/4448) ([proller](https://github.com/proller))
* N-gram distance was added for fuzzy string comparison. It is similar to q-gram metrics in the R language. [#4466](https://github.com/yandex/ClickHouse/pull/4466) ([Danila Kutenin](https://github.com/danlark1))
* Combine rules for graphite rollup from dedicated aggregation and retention patterns. [#4426](https://github.com/yandex/ClickHouse/pull/4426) ([Mikhail f. Shiryaev](https://github.com/Felixoid))
* Added `max_execution_speed` and `max_execution_speed_bytes` to limit resource usage. Added the `min_execution_speed_bytes` setting to complement `min_execution_speed`. [#4430](https://github.com/yandex/ClickHouse/pull/4430) ([Winter Zhang](https://github.com/zhang2014))
* Implemented function `flatten` (see the sketch after this list). [#4555](https://github.com/yandex/ClickHouse/pull/4555) [#4409](https://github.com/yandex/ClickHouse/pull/4409) ([alexey-milovidov](https://github.com/alexey-milovidov), [kzon](https://github.com/kzon))
* Added functions `arrayEnumerateDenseRanked` and `arrayEnumerateUniqRanked` (like `arrayEnumerateUniq`, but they allow fine-tuning the array depth to look inside multidimensional arrays). [#4475](https://github.com/yandex/ClickHouse/pull/4475) ([proller](https://github.com/proller)) [#4601](https://github.com/yandex/ClickHouse/pull/4601) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Multiple JOINs with some restrictions: no asterisks, no complex aliases in ON/WHERE/GROUP BY/... [#4462](https://github.com/yandex/ClickHouse/pull/4462) ([Artem Zuikov](https://github.com/4ertus2))
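For illustration only, a minimal sketch of a few of the additions above; the `ngramDistance` spelling follows the linked fuzzy-comparison PR, so treat the names here as assumptions rather than documentation:

```sql
-- Flatten a nested array into a single level with the new `flatten` function.
SELECT flatten([[1, 2], [3, 4], [5]]) AS flat;

-- Fuzzy string comparison via n-gram distance (0 = identical strings).
SELECT ngramDistance('ClickHouse', 'ClickHome') AS dist;

-- Limit the execution rate of queries with the new settings.
SET max_execution_speed = 1000000;         -- rows per second
SET max_execution_speed_bytes = 100000000; -- bytes per second
```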
### Bug Fixes
* This release also contains all bug fixes from 19.3 and 19.1.
* Fixed bug in data skipping indices: order of granules after INSERT was incorrect. [#4407](https://github.com/yandex/ClickHouse/pull/4407) ([Nikita Vasilev](https://github.com/nikvas0))
* Fixed `set` index for `Nullable` and `LowCardinality` columns. Before this fix, a `set` index with a `Nullable` or `LowCardinality` column led to the error `Data type must be deserialized with multiple streams` while selecting. [#4594](https://github.com/yandex/ClickHouse/pull/4594) ([Nikolai Kochetov](https://github.com/KochetovNicolai))
* Correctly set update_time on full `executable` dictionary update. [#4551](https://github.com/yandex/ClickHouse/pull/4551) ([Tema Novikov](https://github.com/temoon))
* Fix broken progress bar in 19.3. [#4627](https://github.com/yandex/ClickHouse/pull/4627) ([filimonov](https://github.com/filimonov))
* Fixed inconsistent values of MemoryTracker when a memory region was shrunk, in certain cases. [#4619](https://github.com/yandex/ClickHouse/pull/4619) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed undefined behaviour in ThreadPool. [#4612](https://github.com/yandex/ClickHouse/pull/4612) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a very rare crash with the message `mutex lock failed: Invalid argument` that could happen when a MergeTree table was dropped concurrently with a SELECT. [#4608](https://github.com/yandex/ClickHouse/pull/4608) ([Alex Zatelepin](https://github.com/ztlpn))
* ODBC driver compatibility with the `LowCardinality` data type. [#4381](https://github.com/yandex/ClickHouse/pull/4381) ([proller](https://github.com/proller))
* FreeBSD: Fixup for the `AIOcontextPool: Found io_event with unknown id 0` error. [#4438](https://github.com/yandex/ClickHouse/pull/4438) ([urgordeadbeef](https://github.com/urgordeadbeef))
* The `system.part_log` table was created regardless of the configuration. [#4483](https://github.com/yandex/ClickHouse/pull/4483) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fix undefined behaviour in the `dictIsIn` function for cache dictionaries. [#4515](https://github.com/yandex/ClickHouse/pull/4515) ([alesapin](https://github.com/alesapin))
* Fixed a deadlock when a SELECT query locks the same table multiple times (e.g. from different threads or when executing multiple subqueries) and there is a concurrent DDL query. [#4535](https://github.com/yandex/ClickHouse/pull/4535) ([Alex Zatelepin](https://github.com/ztlpn))
* Disable compile_expressions by default until we get our own `llvm` contrib and can test it with `clang` and `asan`. [#4579](https://github.com/yandex/ClickHouse/pull/4579) ([alesapin](https://github.com/alesapin))
* Prevent `std::terminate` when `invalidate_query` for a `clickhouse` external dictionary source has returned a wrong resultset (empty, more than one row, or more than one column). Fixed an issue where the `invalidate_query` was performed every five seconds regardless of the `lifetime`. [#4583](https://github.com/yandex/ClickHouse/pull/4583) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Avoid deadlock when the `invalidate_query` for a dictionary with a `clickhouse` source involved the `system.dictionaries` table or the `Dictionaries` database (rare case). [#4599](https://github.com/yandex/ClickHouse/pull/4599) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixes for CROSS JOIN with empty WHERE. [#4598](https://github.com/yandex/ClickHouse/pull/4598) ([Artem Zuikov](https://github.com/4ertus2))
* Fixed segfault in function "replicate" when a constant argument is passed. [#4603](https://github.com/yandex/ClickHouse/pull/4603) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fix lambda function with predicate optimizer. [#4408](https://github.com/yandex/ClickHouse/pull/4408) ([Winter Zhang](https://github.com/zhang2014))
* Multiple fixes for multiple JOINs. [#4595](https://github.com/yandex/ClickHouse/pull/4595) ([Artem Zuikov](https://github.com/4ertus2))

### Improvements
* Support aliases in the JOIN ON section for right table columns. [#4412](https://github.com/yandex/ClickHouse/pull/4412) ([Artem Zuikov](https://github.com/4ertus2))
* Results of multiple JOINs need correct result names to be used in subselects. Replace flat aliases with source names in the result. [#4474](https://github.com/yandex/ClickHouse/pull/4474) ([Artem Zuikov](https://github.com/4ertus2))
* Improve push-down logic for joined statements. [#4387](https://github.com/yandex/ClickHouse/pull/4387) ([Ivan](https://github.com/abyss7))

### Performance Improvements
* Improved heuristics of the "move to PREWHERE" optimization. [#4405](https://github.com/yandex/ClickHouse/pull/4405) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Use proper lookup tables that use HashTable's API for 8-bit and 16-bit keys. [#4536](https://github.com/yandex/ClickHouse/pull/4536) ([Amos Bird](https://github.com/amosbird))
* Improved performance of string comparison. [#4564](https://github.com/yandex/ClickHouse/pull/4564) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Clean up the distributed DDL queue in a separate thread so that it doesn't slow down the main loop that processes distributed DDL tasks. [#4502](https://github.com/yandex/ClickHouse/pull/4502) ([Alex Zatelepin](https://github.com/ztlpn))
* When `min_bytes_to_use_direct_io` is set to 1, not every file was opened with O_DIRECT mode because the data size to read was sometimes underestimated by the size of one compressed block. [#4526](https://github.com/yandex/ClickHouse/pull/4526) ([alexey-milovidov](https://github.com/alexey-milovidov))

### Build/Testing/Packaging Improvement
* Added support for clang-9. [#4604](https://github.com/yandex/ClickHouse/pull/4604) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fix wrong `__asm__` instructions (again). [#4621](https://github.com/yandex/ClickHouse/pull/4621) ([Konstantin Podshumok](https://github.com/podshumok))
* Add the ability to specify settings for `clickhouse-performance-test` from the command line. [#4437](https://github.com/yandex/ClickHouse/pull/4437) ([alesapin](https://github.com/alesapin))
* Add dictionaries tests to integration tests. [#4477](https://github.com/yandex/ClickHouse/pull/4477) ([alesapin](https://github.com/alesapin))
* Added queries from the benchmark on the website to automated performance tests. [#4496](https://github.com/yandex/ClickHouse/pull/4496) ([alexey-milovidov](https://github.com/alexey-milovidov))
* `xxhash.h` does not exist in external lz4 because it is an implementation detail and its symbols are namespaced with the `XXH_NAMESPACE` macro. When lz4 is external, xxHash has to be external too, and the dependents have to link to it. [#4495](https://github.com/yandex/ClickHouse/pull/4495) ([Orivej Desh](https://github.com/orivej))
* Fixed a case where the `quantileTiming` aggregate function could be called with a negative or floating point argument (this fixes a fuzz test with the undefined behaviour sanitizer). [#4506](https://github.com/yandex/ClickHouse/pull/4506) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Spelling error correction. [#4531](https://github.com/yandex/ClickHouse/pull/4531) ([sdk2](https://github.com/sdk2))
* Fix compilation on Mac. [#4371](https://github.com/yandex/ClickHouse/pull/4371) ([Vitaly Baranov](https://github.com/vitlibar))
* Build fixes for FreeBSD and various unusual build configurations. [#4444](https://github.com/yandex/ClickHouse/pull/4444) ([proller](https://github.com/proller))

## ClickHouse release 19.3.7, 2019-03-12

### Bug fixes

* Fixed error in #3920. This error manifested itself as random cache corruption (messages `Unknown codec family code`, `Cannot seek through file`) and segfaults. This bug first appeared in version 19.1 and is present in versions up to 19.1.10 and 19.3.6. [#4623](https://github.com/yandex/ClickHouse/pull/4623) ([alexey-milovidov](https://github.com/alexey-milovidov))

## ClickHouse release 19.3.6, 2019-03-02

### Bug fixes

* When there are more than 1000 threads in a thread pool, `std::terminate` may happen on thread exit. [Azat Khuzhin](https://github.com/azat) [#4485](https://github.com/yandex/ClickHouse/pull/4485) [#4505](https://github.com/yandex/ClickHouse/pull/4505) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Now it's possible to create `ReplicatedMergeTree*` tables with comments on columns without defaults, and tables with column codecs without comments and defaults. Also fixed comparison of codecs. [#4523](https://github.com/yandex/ClickHouse/pull/4523) ([alesapin](https://github.com/alesapin))
* Fixed crash on JOIN with an array or tuple. [#4552](https://github.com/yandex/ClickHouse/pull/4552) ([Artem Zuikov](https://github.com/4ertus2))
* Fixed crash in clickhouse-copier with the message `ThreadStatus not created`. [#4540](https://github.com/yandex/ClickHouse/pull/4540) ([Artem Zuikov](https://github.com/4ertus2))
* Fixed hangup on server shutdown if distributed DDLs were used. [#4472](https://github.com/yandex/ClickHouse/pull/4472) ([Alex Zatelepin](https://github.com/ztlpn))
* Incorrect column numbers were printed in the error message about text format parsing for columns with a number greater than 10. [#4484](https://github.com/yandex/ClickHouse/pull/4484) ([alexey-milovidov](https://github.com/alexey-milovidov))

### Build/Testing/Packaging Improvements

* Fixed build with AVX enabled. [#4527](https://github.com/yandex/ClickHouse/pull/4527) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Enable extended accounting and IO accounting based on a known-good kernel version instead of the kernel under which the binary was compiled. [#4541](https://github.com/yandex/ClickHouse/pull/4541) ([nvartolomei](https://github.com/nvartolomei))
* Allow skipping the setting of core_dump.size_limit, warning instead of throwing if setting the limit fails. [#4473](https://github.com/yandex/ClickHouse/pull/4473) ([proller](https://github.com/proller))
* Removed the `inline` tags of `void readBinary(...)` in `Field.cpp`. Also merged redundant `namespace DB` blocks. [#4530](https://github.com/yandex/ClickHouse/pull/4530) ([hcz](https://github.com/hczhcz))

## ClickHouse release 19.3.5, 2019-02-21

### Bug fixes
* Fixed a bug with processing of large HTTP insert queries. [#4454](https://github.com/yandex/ClickHouse/pull/4454) ([alesapin](https://github.com/alesapin))
* Fixed backward incompatibility with old versions due to a wrong implementation of the `send_logs_level` setting. [#4445](https://github.com/yandex/ClickHouse/pull/4445) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed backward incompatibility of the table function `remote` introduced with column comments. [#4446](https://github.com/yandex/ClickHouse/pull/4446) ([alexey-milovidov](https://github.com/alexey-milovidov))

## ClickHouse release 19.3.4, 2019-02-16

### Improvements
* Table index size is not accounted for in memory limits when doing an `ATTACH TABLE` query. Avoided the possibility that a table cannot be attached after being detached. [#4396](https://github.com/yandex/ClickHouse/pull/4396) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Slightly raised the limit on the maximum string and array size received from ZooKeeper. This allows continuing to work with an increased `CLIENT_JVMFLAGS=-Djute.maxbuffer=...` on ZooKeeper. [#4398](https://github.com/yandex/ClickHouse/pull/4398) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Allow repairing an abandoned replica even if it already has a huge number of nodes in its queue. [#4399](https://github.com/yandex/ClickHouse/pull/4399) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added one required argument to the `SET` index (the maximum number of stored rows). [#4386](https://github.com/yandex/ClickHouse/pull/4386) ([Nikita Vasilev](https://github.com/nikvas0))

### Bug Fixes
* Fixed the `WITH ROLLUP` result for GROUP BY with a single `LowCardinality` key. [#4384](https://github.com/yandex/ClickHouse/pull/4384) ([Nikolai Kochetov](https://github.com/KochetovNicolai))
* Fixed a bug in the set index (dropping a granule if it contains more than `max_rows` rows). [#4386](https://github.com/yandex/ClickHouse/pull/4386) ([Nikita Vasilev](https://github.com/nikvas0))
* A lot of FreeBSD build fixes. [#4397](https://github.com/yandex/ClickHouse/pull/4397) ([proller](https://github.com/proller))
* Fixed alias substitution in queries with a subquery containing the same alias (issue [#4110](https://github.com/yandex/ClickHouse/issues/4110)). [#4351](https://github.com/yandex/ClickHouse/pull/4351) ([Artem Zuikov](https://github.com/4ertus2))

### Build/Testing/Packaging Improvements
* Added the ability to run `clickhouse-server` for stateless tests in a docker image. [#4347](https://github.com/yandex/ClickHouse/pull/4347) ([Vasily Nemkov](https://github.com/Enmk))

## ClickHouse release 19.3.3, 2019-02-13

### New Features
* Added the `KILL MUTATION` statement that allows removing mutations that are for some reason stuck. Added `latest_failed_part`, `latest_fail_time`, `latest_fail_reason` fields to the `system.mutations` table for easier troubleshooting (see the sketch after this list). [#4287](https://github.com/yandex/ClickHouse/pull/4287) ([Alex Zatelepin](https://github.com/ztlpn))
* Added aggregate function `entropy` which computes Shannon entropy. [#4238](https://github.com/yandex/ClickHouse/pull/4238) ([Quid37](https://github.com/Quid37))
* Added the ability to send queries `INSERT INTO tbl VALUES (....` to the server without splitting them into `query` and `data` parts. [#4301](https://github.com/yandex/ClickHouse/pull/4301) ([alesapin](https://github.com/alesapin))
* A generic implementation of the `arrayWithConstant` function was added. [#4322](https://github.com/yandex/ClickHouse/pull/4322) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Implemented the `NOT BETWEEN` comparison operator. [#4228](https://github.com/yandex/ClickHouse/pull/4228) ([Dmitry Naumov](https://github.com/nezed))
* Implemented `sumMapFiltered` in order to be able to limit the number of keys for which values will be summed by `sumMap`. [#4129](https://github.com/yandex/ClickHouse/pull/4129) ([Léo Ercolanelli](https://github.com/ercolanelli-leo))
* Added support for `Nullable` types in the `mysql` table function. [#4198](https://github.com/yandex/ClickHouse/pull/4198) ([Emmanuel Donin de Rosière](https://github.com/edonin))
* Support for arbitrary constant expressions in the `LIMIT` clause. [#4246](https://github.com/yandex/ClickHouse/pull/4246) ([k3box](https://github.com/k3box))
* Added the `topKWeighted` aggregate function that takes an additional argument with an (unsigned integer) weight. [#4245](https://github.com/yandex/ClickHouse/pull/4245) ([Andrew Golman](https://github.com/andrewgolman))
* `StorageJoin` now supports the `join_overwrite` setting that allows overwriting existing values of the same key. [#3973](https://github.com/yandex/ClickHouse/pull/3973) ([Amos Bird](https://github.com/amosbird))
* Added function `toStartOfInterval`. [#4304](https://github.com/yandex/ClickHouse/pull/4304) ([Vitaly Baranov](https://github.com/vitlibar))
* Added the `RowBinaryWithNamesAndTypes` format. [#4200](https://github.com/yandex/ClickHouse/pull/4200) ([Oleg V. Kozlyuk](https://github.com/DarkWanderer))
* Added `IPv4` and `IPv6` data types. More effective implementations of `IPv*` functions. [#3669](https://github.com/yandex/ClickHouse/pull/3669) ([Vasily Nemkov](https://github.com/Enmk))
* Added function `toStartOfTenMinutes()`. [#4298](https://github.com/yandex/ClickHouse/pull/4298) ([Vitaly Baranov](https://github.com/vitlibar))
* Added `Protobuf` output format. [#4005](https://github.com/yandex/ClickHouse/pull/4005) [#4158](https://github.com/yandex/ClickHouse/pull/4158) ([Vitaly Baranov](https://github.com/vitlibar))
* Added brotli support in the HTTP interface for data import (INSERTs). [#4235](https://github.com/yandex/ClickHouse/pull/4235) ([Mikhail](https://github.com/fandyushin))
* Added hints in the command-line client when a user makes a typo in a function name or data type. [#4239](https://github.com/yandex/ClickHouse/pull/4239) ([Danila Kutenin](https://github.com/danlark1))
* Added `Query-Id` to the server's HTTP response header. [#4231](https://github.com/yandex/ClickHouse/pull/4231) ([Mikhail](https://github.com/fandyushin))
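For illustration only, a hedged sketch of a few of the entries above; the database `default`, the table `hits`, and the mutation id are placeholders:

```sql
-- Inspect a stuck mutation via the new system.mutations fields, then remove it.
SELECT mutation_id, latest_fail_reason FROM system.mutations WHERE NOT is_done;
KILL MUTATION WHERE database = 'default' AND table = 'hits' AND mutation_id = 'mutation_42.txt';

-- Shannon entropy of a distribution (new aggregate function `entropy`).
SELECT entropy(number % 10) FROM numbers(1000);

-- New date helpers and the NOT BETWEEN operator.
SELECT
    toStartOfInterval(now(), INTERVAL 15 MINUTE) AS bucket,
    toStartOfTenMinutes(now())                   AS ten_min,
    10 NOT BETWEEN 1 AND 5                       AS outside;
```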
### Experimental features
* Added `minmax` and `set` data skipping indices for the MergeTree table engines family (see the sketch after this list). [#4143](https://github.com/yandex/ClickHouse/pull/4143) ([Nikita Vasilev](https://github.com/nikvas0))
* Added conversion of `CROSS JOIN` to `INNER JOIN` if possible. [#4221](https://github.com/yandex/ClickHouse/pull/4221) [#4266](https://github.com/yandex/ClickHouse/pull/4266) ([Artem Zuikov](https://github.com/4ertus2))
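A minimal sketch of how such skipping indices are declared; the `allow_experimental_data_skipping_indices` setting name is an assumption about how the feature was gated at the time, and `hits` is a placeholder table:

```sql
SET allow_experimental_data_skipping_indices = 1;

CREATE TABLE hits
(
    dt  DateTime,
    url String,
    -- Skip granule blocks whose length(url) range cannot match the filter.
    INDEX len_idx length(url) TYPE minmax GRANULARITY 4,
    -- Store up to 1000 distinct values per index block (the required argument added in 19.3.4).
    INDEX url_idx url TYPE set(1000) GRANULARITY 4
)
ENGINE = MergeTree()
ORDER BY dt;
```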
### Bug Fixes
* Fixed `Not found column` for duplicate columns in the `JOIN ON` section. [#4279](https://github.com/yandex/ClickHouse/pull/4279) ([Artem Zuikov](https://github.com/4ertus2))
* Make the `START REPLICATED SENDS` command start replicated sends. [#4229](https://github.com/yandex/ClickHouse/pull/4229) ([nvartolomei](https://github.com/nvartolomei))
* Fixed aggregate function execution with `Array(LowCardinality)` arguments. [#4055](https://github.com/yandex/ClickHouse/pull/4055) ([KochetovNicolai](https://github.com/KochetovNicolai))
* Fixed wrong behaviour when doing an `INSERT ... SELECT ... FROM file(...)` query while the file has `CSVWithNames` or `TSVWithNames` format and the first data row is missing. [#4297](https://github.com/yandex/ClickHouse/pull/4297) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed crash on dictionary reload if the dictionary is not available. This bug appeared in 19.1.6. [#4188](https://github.com/yandex/ClickHouse/pull/4188) ([proller](https://github.com/proller))
* Fixed `ALL JOIN` with duplicates in the right table. [#4184](https://github.com/yandex/ClickHouse/pull/4184) ([Artem Zuikov](https://github.com/4ertus2))
* Fixed segmentation fault with `use_uncompressed_cache=1` and an exception with wrong uncompressed size. This bug appeared in 19.1.6. [#4186](https://github.com/yandex/ClickHouse/pull/4186) ([alesapin](https://github.com/alesapin))
* Fixed a `compile_expressions` bug with comparison of big (more than int16) dates. [#4341](https://github.com/yandex/ClickHouse/pull/4341) ([alesapin](https://github.com/alesapin))
* Fixed infinite loop when selecting from the table function `numbers(0)`. [#4280](https://github.com/yandex/ClickHouse/pull/4280) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Temporarily disabled predicate optimization for `ORDER BY`. [#3890](https://github.com/yandex/ClickHouse/pull/3890) ([Winter Zhang](https://github.com/zhang2014))
* Fixed `Illegal instruction` error when using base64 functions on old CPUs. This error has been reproduced only when ClickHouse was compiled with gcc-8. [#4275](https://github.com/yandex/ClickHouse/pull/4275) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed `No message received` error when interacting with the PostgreSQL ODBC driver through a TLS connection. Also fixes a segfault when using the MySQL ODBC driver. [#4170](https://github.com/yandex/ClickHouse/pull/4170) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an incorrect result when `Date` and `DateTime` arguments are used in branches of the conditional operator (function `if`). Added a generic case for function `if`. [#4243](https://github.com/yandex/ClickHouse/pull/4243) ([alexey-milovidov](https://github.com/alexey-milovidov))
* ClickHouse dictionaries now load within the `clickhouse` process. [#4166](https://github.com/yandex/ClickHouse/pull/4166) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed deadlock when a `SELECT` from a table with the `File` engine was retried after a `No such file or directory` error. [#4161](https://github.com/yandex/ClickHouse/pull/4161) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a race condition where selecting from `system.tables` could give a `table doesn't exist` error. [#4313](https://github.com/yandex/ClickHouse/pull/4313) ([alexey-milovidov](https://github.com/alexey-milovidov))
* `clickhouse-client` could segfault on exit while loading data for command line suggestions if it was run in interactive mode. [#4317](https://github.com/yandex/ClickHouse/pull/4317) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a bug where the execution of mutations containing `IN` operators was producing incorrect results. [#4099](https://github.com/yandex/ClickHouse/pull/4099) ([Alex Zatelepin](https://github.com/ztlpn))
* Fixed an error: if there is a database with the `Dictionary` engine, all dictionaries are forced to load at server startup, and if there is a dictionary with a ClickHouse source from localhost, that dictionary cannot load. [#4255](https://github.com/yandex/ClickHouse/pull/4255) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an error where system log tables were attempted to be created again at server shutdown. [#4254](https://github.com/yandex/ClickHouse/pull/4254) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Correctly return the right type and properly handle locks in the `joinGet` function. [#4153](https://github.com/yandex/ClickHouse/pull/4153) ([Amos Bird](https://github.com/amosbird))
* Added the `sumMapWithOverflow` function. [#4151](https://github.com/yandex/ClickHouse/pull/4151) ([Léo Ercolanelli](https://github.com/ercolanelli-leo))
* Fixed segfault with `allow_experimental_multiple_joins_emulation`. [52de2c](https://github.com/yandex/ClickHouse/commit/52de2cd927f7b5257dd67e175f0a5560a48840d0) ([Artem Zuikov](https://github.com/4ertus2))
* Fixed a bug with incorrect `Date` and `DateTime` comparison. [#4237](https://github.com/yandex/ClickHouse/pull/4237) ([valexey](https://github.com/valexey))
* Fixed a fuzz test under the undefined behavior sanitizer: added parameter type check for the `quantile*Weighted` family of functions. [#4145](https://github.com/yandex/ClickHouse/pull/4145) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a rare race condition where removal of old data parts could fail with a `File not found` error. [#4378](https://github.com/yandex/ClickHouse/pull/4378) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fix installation of the package with a missing /etc/clickhouse-server/config.xml. [#4343](https://github.com/yandex/ClickHouse/pull/4343) ([proller](https://github.com/proller))

### Build/Testing/Packaging Improvements
* Debian package: correct the /etc/clickhouse-server/preprocessed link according to the config. [#4205](https://github.com/yandex/ClickHouse/pull/4205) ([proller](https://github.com/proller))
* Various build fixes for FreeBSD. [#4225](https://github.com/yandex/ClickHouse/pull/4225) ([proller](https://github.com/proller))
* Added the ability to create, fill and drop tables in perftest. [#4220](https://github.com/yandex/ClickHouse/pull/4220) ([alesapin](https://github.com/alesapin))
* Added a script to check for duplicate includes. [#4326](https://github.com/yandex/ClickHouse/pull/4326) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added the ability to run queries by index in performance tests. [#4264](https://github.com/yandex/ClickHouse/pull/4264) ([alesapin](https://github.com/alesapin))
* The package with debug symbols is suggested to be installed. [#4274](https://github.com/yandex/ClickHouse/pull/4274) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Refactoring of performance-test. Better logging and signal handling. [#4171](https://github.com/yandex/ClickHouse/pull/4171) ([alesapin](https://github.com/alesapin))
* Added docs to the anonymized Yandex.Metrika datasets. [#4164](https://github.com/yandex/ClickHouse/pull/4164) ([alesapin](https://github.com/alesapin))
* Added a tool for converting an old month-partitioned part to the custom-partitioned format. [#4195](https://github.com/yandex/ClickHouse/pull/4195) ([Alex Zatelepin](https://github.com/ztlpn))
* Added docs about two datasets in s3. [#4144](https://github.com/yandex/ClickHouse/pull/4144) ([alesapin](https://github.com/alesapin))
* Added a script which creates a changelog from pull request descriptions. [#4169](https://github.com/yandex/ClickHouse/pull/4169) [#4173](https://github.com/yandex/ClickHouse/pull/4173) ([KochetovNicolai](https://github.com/KochetovNicolai))
* Added a puppet module for ClickHouse. [#4182](https://github.com/yandex/ClickHouse/pull/4182) ([Maxim Fedotov](https://github.com/MaxFedotov))
* Added docs for a group of undocumented functions. [#4168](https://github.com/yandex/ClickHouse/pull/4168) ([Winter Zhang](https://github.com/zhang2014))
* ARM build fixes. [#4210](https://github.com/yandex/ClickHouse/pull/4210) [#4306](https://github.com/yandex/ClickHouse/pull/4306) [#4291](https://github.com/yandex/ClickHouse/pull/4291) ([proller](https://github.com/proller))
* Dictionary tests are now able to run from `ctest`. [#4189](https://github.com/yandex/ClickHouse/pull/4189) ([proller](https://github.com/proller))
* Now `/etc/ssl` is used as the default directory with SSL certificates. [#4167](https://github.com/yandex/ClickHouse/pull/4167) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added a check for SSE and AVX instructions at start. [#4234](https://github.com/yandex/ClickHouse/pull/4234) ([Igr](https://github.com/igron99))
* The init script now waits for the server to start. [#4281](https://github.com/yandex/ClickHouse/pull/4281) ([proller](https://github.com/proller))

### Backward Incompatible Changes
* Removed the `allow_experimental_low_cardinality_type` setting. `LowCardinality` data types are production ready. [#4323](https://github.com/yandex/ClickHouse/pull/4323) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Reduce the mark cache size and uncompressed cache size according to the available memory amount. [#4240](https://github.com/yandex/ClickHouse/pull/4240) ([Lopatin Konstantin](https://github.com/k-lopatin))
* Added keyword `INDEX` in the `CREATE TABLE` query. A column with the name `index` must be quoted with backticks or double quotes: `` `index` ``. [#4143](https://github.com/yandex/ClickHouse/pull/4143) ([Nikita Vasilev](https://github.com/nikvas0))
* `sumMap` now promotes the result type instead of overflowing. The old `sumMap` behavior can be obtained by using the `sumMapWithOverflow` function (see the sketch after this list). [#4151](https://github.com/yandex/ClickHouse/pull/4151) ([Léo Ercolanelli](https://github.com/ercolanelli-leo))
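A small sketch of these two changes; table and column names are placeholders:

```sql
-- `index` is now a keyword, so a column with that name must be quoted.
CREATE TABLE t (`index` UInt32, v String) ENGINE = MergeTree() ORDER BY `index`;

-- sumMap promotes the result type, while sumMapWithOverflow keeps the argument type
-- (UInt8 here, so summing 200 + 200 wraps around).
SELECT sumMap([1], [toUInt8(200)]), sumMapWithOverflow([1], [toUInt8(200)])
FROM numbers(2);
```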
### Performance Improvements
* `std::sort` replaced by `pdqsort` for queries without `LIMIT`. [#4236](https://github.com/yandex/ClickHouse/pull/4236) ([Evgenii Pravda](https://github.com/kvinty))
* The server now reuses threads from the global thread pool. This affects performance in some corner cases. [#4150](https://github.com/yandex/ClickHouse/pull/4150) ([alexey-milovidov](https://github.com/alexey-milovidov))

### Improvements
* Implemented AIO support for FreeBSD. [#4305](https://github.com/yandex/ClickHouse/pull/4305) ([urgordeadbeef](https://github.com/urgordeadbeef))
* `SELECT * FROM a JOIN b USING a, b` now returns the `a` and `b` columns only from the left table. [#4141](https://github.com/yandex/ClickHouse/pull/4141) ([Artem Zuikov](https://github.com/4ertus2))
* Allow the `-C` option of the client to work the same as the `-c` option. [#4232](https://github.com/yandex/ClickHouse/pull/4232) ([syominsergey](https://github.com/syominsergey))
* The `--password` option used without a value now requires the password to be entered from stdin. [#4230](https://github.com/yandex/ClickHouse/pull/4230) ([BSD_Conqueror](https://github.com/bsd-conqueror))
* Added highlighting of unescaped metacharacters in string literals that contain `LIKE` expressions or regexps. [#4327](https://github.com/yandex/ClickHouse/pull/4327) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added cancelling of HTTP read-only queries if the client socket goes away. [#4213](https://github.com/yandex/ClickHouse/pull/4213) ([nvartolomei](https://github.com/nvartolomei))
* The server now reports progress to keep client connections alive. [#4215](https://github.com/yandex/ClickHouse/pull/4215) ([Ivan](https://github.com/abyss7))
* Slightly better message with the reason for an OPTIMIZE query when the `optimize_throw_if_noop` setting is enabled (see the sketch after this list). [#4294](https://github.com/yandex/ClickHouse/pull/4294) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added support of the `--version` option for the clickhouse server. [#4251](https://github.com/yandex/ClickHouse/pull/4251) ([Lopatin Konstantin](https://github.com/k-lopatin))
* Added the `--help/-h` option to `clickhouse-server`. [#4233](https://github.com/yandex/ClickHouse/pull/4233) ([Yuriy Baranov](https://github.com/yurriy))
* Added support for scalar subqueries with an aggregate function state result. [#4348](https://github.com/yandex/ClickHouse/pull/4348) ([Nikolai Kochetov](https://github.com/KochetovNicolai))
* Improved server shutdown time and the waiting time of ALTERs. [#4372](https://github.com/yandex/ClickHouse/pull/4372) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added info about the `replicated_can_become_leader` setting to `system.replicas` and added logging if the replica won't try to become leader. [#4379](https://github.com/yandex/ClickHouse/pull/4379) ([Alex Zatelepin](https://github.com/ztlpn))
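For illustration, a minimal sketch of the OPTIMIZE diagnostics mentioned in the list above; `hits` is a placeholder MergeTree table:

```sql
SET optimize_throw_if_noop = 1;
-- If the merge cannot be assigned, the thrown exception now explains the reason.
OPTIMIZE TABLE hits FINAL;
```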
## ClickHouse release 19.1.14, 2019-03-14

* Fixed error `Column ... queried more than once` that may happen if the setting `asterisk_left_columns_only` is set to 1 in case of using `GLOBAL JOIN` with `SELECT *` (rare case). The issue does not exist in 19.3 and newer. [6bac7d8d](https://github.com/yandex/ClickHouse/pull/4692/commits/6bac7d8d11a9b0d6de0b32b53c47eb2f6f8e7062) ([Artem Zuikov](https://github.com/4ertus2))

## ClickHouse release 19.1.13, 2019-03-12

This release contains exactly the same set of patches as 19.3.7.

## ClickHouse release 19.1.10, 2019-03-03

This release contains exactly the same set of patches as 19.3.6.


## ClickHouse release 19.1.9, 2019-02-21

### Bug fixes
* Fixed backward incompatibility with old versions due to a wrong implementation of the `send_logs_level` setting. [#4445](https://github.com/yandex/ClickHouse/pull/4445) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed backward incompatibility of the table function `remote` introduced with column comments. [#4446](https://github.com/yandex/ClickHouse/pull/4446) ([alexey-milovidov](https://github.com/alexey-milovidov))

## ClickHouse release 19.1.8, 2019-02-16

### Bug Fixes
* Fix installation of the package with a missing /etc/clickhouse-server/config.xml. [#4343](https://github.com/yandex/ClickHouse/pull/4343) ([proller](https://github.com/proller))


## ClickHouse release 19.1.7, 2019-02-15

### Bug Fixes
* Correctly return the right type and properly handle locks in the `joinGet` function. [#4153](https://github.com/yandex/ClickHouse/pull/4153) ([Amos Bird](https://github.com/amosbird))
* Fixed an error where system log tables were attempted to be created again at server shutdown. [#4254](https://github.com/yandex/ClickHouse/pull/4254) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an error: if there is a database with the `Dictionary` engine, all dictionaries are forced to load at server startup, and if there is a dictionary with a ClickHouse source from localhost, that dictionary cannot load. [#4255](https://github.com/yandex/ClickHouse/pull/4255) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a bug where the execution of mutations containing `IN` operators was producing incorrect results. [#4099](https://github.com/yandex/ClickHouse/pull/4099) ([Alex Zatelepin](https://github.com/ztlpn))
* `clickhouse-client` could segfault on exit while loading data for command line suggestions if it was run in interactive mode. [#4317](https://github.com/yandex/ClickHouse/pull/4317) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a race condition where selecting from `system.tables` could give a `table doesn't exist` error. [#4313](https://github.com/yandex/ClickHouse/pull/4313) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed deadlock when a `SELECT` from a table with the `File` engine was retried after a `No such file or directory` error. [#4161](https://github.com/yandex/ClickHouse/pull/4161) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an issue: local ClickHouse dictionaries were loaded via TCP, but they should load within the process. [#4166](https://github.com/yandex/ClickHouse/pull/4166) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed `No message received` error when interacting with the PostgreSQL ODBC driver through a TLS connection. Also fixes a segfault when using the MySQL ODBC driver. [#4170](https://github.com/yandex/ClickHouse/pull/4170) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Temporarily disabled predicate optimization for `ORDER BY`. [#3890](https://github.com/yandex/ClickHouse/pull/3890) ([Winter Zhang](https://github.com/zhang2014))
* Fixed infinite loop when selecting from the table function `numbers(0)`. [#4280](https://github.com/yandex/ClickHouse/pull/4280) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a `compile_expressions` bug with comparison of big (more than int16) dates. [#4341](https://github.com/yandex/ClickHouse/pull/4341) ([alesapin](https://github.com/alesapin))
* Fixed segmentation fault with `uncompressed_cache=1` and an exception with wrong uncompressed size. [#4186](https://github.com/yandex/ClickHouse/pull/4186) ([alesapin](https://github.com/alesapin))
* Fixed `ALL JOIN` with duplicates in the right table. [#4184](https://github.com/yandex/ClickHouse/pull/4184) ([Artem Zuikov](https://github.com/4ertus2))
* Fixed wrong behaviour when doing an `INSERT ... SELECT ... FROM file(...)` query while the file has `CSVWithNames` or `TSVWithNames` format and the first data row is missing. [#4297](https://github.com/yandex/ClickHouse/pull/4297) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed aggregate function execution with `Array(LowCardinality)` arguments. [#4055](https://github.com/yandex/ClickHouse/pull/4055) ([KochetovNicolai](https://github.com/KochetovNicolai))
* Debian package: correct the /etc/clickhouse-server/preprocessed link according to the config. [#4205](https://github.com/yandex/ClickHouse/pull/4205) ([proller](https://github.com/proller))
* Fixed a fuzz test under the undefined behavior sanitizer: added parameter type check for the `quantile*Weighted` family of functions. [#4145](https://github.com/yandex/ClickHouse/pull/4145) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Make the `START REPLICATED SENDS` command start replicated sends. [#4229](https://github.com/yandex/ClickHouse/pull/4229) ([nvartolomei](https://github.com/nvartolomei))
* Fixed `Not found column` for duplicate columns in the JOIN ON section. [#4279](https://github.com/yandex/ClickHouse/pull/4279) ([Artem Zuikov](https://github.com/4ertus2))
* Now `/etc/ssl` is used as the default directory with SSL certificates. [#4167](https://github.com/yandex/ClickHouse/pull/4167) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed crash on dictionary reload if the dictionary is not available. [#4188](https://github.com/yandex/ClickHouse/pull/4188) ([proller](https://github.com/proller))
* Fixed a bug with incorrect `Date` and `DateTime` comparison. [#4237](https://github.com/yandex/ClickHouse/pull/4237) ([valexey](https://github.com/valexey))
* Fixed an incorrect result when `Date` and `DateTime` arguments are used in branches of the conditional operator (function `if`). Added a generic case for function `if`. [#4243](https://github.com/yandex/ClickHouse/pull/4243) ([alexey-milovidov](https://github.com/alexey-milovidov))

## ClickHouse release 19.1.6, 2019-01-24

### New Features
@@ -128,7 +393,7 @@

### New features:

-* `DEFAULT` expressions are evaluated for missing fields when loading data in semi-structured input formats (`JSONEachRow`, `TSKV`). [#3555](https://github.com/yandex/ClickHouse/pull/3555)
+* `DEFAULT` expressions are evaluated for missing fields when loading data in semi-structured input formats (`JSONEachRow`, `TSKV`). The feature is enabled with the `insert_sample_with_metadata` setting. [#3555](https://github.com/yandex/ClickHouse/pull/3555)
* The `ALTER TABLE` query now has the `MODIFY ORDER BY` action for changing the sorting key when adding or removing a table column. This is useful for tables in the `MergeTree` family that perform additional tasks when merging based on this sorting key, such as `SummingMergeTree`, `AggregatingMergeTree`, and so on. [#3581](https://github.com/yandex/ClickHouse/pull/3581) [#3755](https://github.com/yandex/ClickHouse/pull/3755)
* For tables in the `MergeTree` family, now you can specify a different sorting key (`ORDER BY`) and index (`PRIMARY KEY`). The sorting key can be longer than the index. [#3581](https://github.com/yandex/ClickHouse/pull/3581)
* Added the `hdfs` table function and the `HDFS` table engine for importing and exporting data to HDFS. [chenxing-xc](https://github.com/yandex/ClickHouse/pull/3617)
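The modified entry above concerns evaluating `DEFAULT` expressions for omitted fields; a hedged sketch of how this looks in practice, using a throwaway `Memory` table:

```sql
SET insert_sample_with_metadata = 1;  -- the setting named in the entry above

CREATE TABLE t (a UInt32, b UInt32 DEFAULT a * 2) ENGINE = Memory;

-- `b` is omitted from the input, so it is computed from its DEFAULT expression (a * 2)
-- instead of being filled with the type's zero value.
INSERT INTO t FORMAT JSONEachRow {"a": 1}
```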
CHANGELOG_RU.md: 179 changed lines

@@ -1,3 +1,180 @@
## ClickHouse release 19.3.5, 2019-02-21

### Bug fixes:

* Fixed a bug with processing of large HTTP insert queries on the server side. [#4454](https://github.com/yandex/ClickHouse/pull/4454) ([alesapin](https://github.com/alesapin))
* Fixed backward incompatibility with old versions caused by a wrong implementation of the `send_logs_level` setting. [#4445](https://github.com/yandex/ClickHouse/pull/4445) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed backward incompatibility of the table function `remote` introduced with column comments. [#4446](https://github.com/yandex/ClickHouse/pull/4446) ([alexey-milovidov](https://github.com/alexey-milovidov))

## ClickHouse release 19.3.4, 2019-02-16

### Improvements:

* When performing an `ATTACH TABLE` query, the memory occupied by the table index is no longer counted against the memory limits. This avoids the situation where it is impossible to `ATTACH TABLE` after the corresponding `DETACH TABLE`. [#4396](https://github.com/yandex/ClickHouse/pull/4396) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Slightly raised the limits on the maximum string and array size received from ZooKeeper. This allows continuing to work after increasing the ZooKeeper setting `CLIENT_JVMFLAGS=-Djute.maxbuffer=...`. [#4398](https://github.com/yandex/ClickHouse/pull/4398) ([alexey-milovidov](https://github.com/alexey-milovidov))
* A replica that has been disabled for a long period can now be repaired even if a huge number of entries has accumulated in its queue. [#4399](https://github.com/yandex/ClickHouse/pull/4399) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added a required parameter (the maximum number of stored values) to secondary indices of the `set` type. [#4386](https://github.com/yandex/ClickHouse/pull/4386) ([Nikita Vasilev](https://github.com/nikvas0))

### Bug fixes:

* Fixed a wrong query result with the `WITH ROLLUP` modifier when grouping by a single column of the `LowCardinality` type. [#4384](https://github.com/yandex/ClickHouse/pull/4384) ([Nikolai Kochetov](https://github.com/KochetovNicolai))
* Fixed a bug in the secondary index of the `set` type (granules containing more than `max_rows` rows were ignored). [#4386](https://github.com/yandex/ClickHouse/pull/4386) ([Nikita Vasilev](https://github.com/nikvas0))
* Fixed alias substitution in queries with a subquery containing the same alias ([#4110](https://github.com/yandex/ClickHouse/issues/4110)). [#4351](https://github.com/yandex/ClickHouse/pull/4351) ([Artem Zuikov](https://github.com/4ertus2))

### Build/testing/packaging improvements:

* Many fixes for building under FreeBSD. [#4397](https://github.com/yandex/ClickHouse/pull/4397) ([proller](https://github.com/proller))
* Ability to run `clickhouse-server` for stateless tests from a docker image. [#4347](https://github.com/yandex/ClickHouse/pull/4347) ([Vasily Nemkov](https://github.com/Enmk))

## ClickHouse release 19.3.3, 2019-02-13

### New features:

* Added the `KILL MUTATION` query, which allows removing mutations that for some reason cannot be executed. The columns `latest_failed_part`, `latest_fail_time`, `latest_fail_reason` were added to the `system.mutations` table to ease diagnostics. [#4287](https://github.com/yandex/ClickHouse/pull/4287) ([Alex Zatelepin](https://github.com/ztlpn))
* Added the aggregate function `entropy`, which computes Shannon entropy. [#4238](https://github.com/yandex/ClickHouse/pull/4238) ([Quid37](https://github.com/Quid37))
* Added a generic implementation of the `arrayWithConstant` function. [#4322](https://github.com/yandex/ClickHouse/pull/4322) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added the `NOT BETWEEN` comparison operator. [#4228](https://github.com/yandex/ClickHouse/pull/4228) ([Dmitry Naumov](https://github.com/nezed))
* Added the `sumMapFiltered` function, a variant of `sumMap` that allows specifying the set of keys over which the summation is performed. [#4129](https://github.com/yandex/ClickHouse/pull/4129) ([Léo Ercolanelli](https://github.com/ercolanelli-leo))
* Added the `sumMapWithOverflow` function. [#4151](https://github.com/yandex/ClickHouse/pull/4151) ([Léo Ercolanelli](https://github.com/ercolanelli-leo))
* Added support for `Nullable` types in the `mysql` table function. [#4198](https://github.com/yandex/ClickHouse/pull/4198) ([Emmanuel Donin de Rosière](https://github.com/edonin))
* Added support for arbitrary constant expressions in the `LIMIT` clause. [#4246](https://github.com/yandex/ClickHouse/pull/4246) ([k3box](https://github.com/k3box))
* Added the `topKWeighted` aggregate function, a variant of `topK` that allows specifying an (unsigned integer) weight for the added value. [#4245](https://github.com/yandex/ClickHouse/pull/4245) ([Andrew Golman](https://github.com/andrewgolman))
* The `Join` engine now supports the `join_overwrite` setting, which allows overwriting values for existing keys. [#3973](https://github.com/yandex/ClickHouse/pull/3973) ([Amos Bird](https://github.com/amosbird))
* Added the `toStartOfInterval` function. [#4304](https://github.com/yandex/ClickHouse/pull/4304) ([Vitaly Baranov](https://github.com/vitlibar))
* Added the `toStartOfTenMinutes` function. [#4298](https://github.com/yandex/ClickHouse/pull/4298) ([Vitaly Baranov](https://github.com/vitlibar))
* Added the `RowBinaryWithNamesAndTypes` format. [#4200](https://github.com/yandex/ClickHouse/pull/4200) ([Oleg V. Kozlyuk](https://github.com/DarkWanderer))
* Added the `IPv4` and `IPv6` types. More effective implementations of the `IPv*` functions. [#3669](https://github.com/yandex/ClickHouse/pull/3669) ([Vasily Nemkov](https://github.com/Enmk))
* Added the `Protobuf` output format. [#4005](https://github.com/yandex/ClickHouse/pull/4005) [#4158](https://github.com/yandex/ClickHouse/pull/4158) ([Vitaly Baranov](https://github.com/vitlibar))
* The HTTP interface now supports the brotli compression algorithm for inserted data. [#4235](https://github.com/yandex/ClickHouse/pull/4235) ([Mikhail](https://github.com/fandyushin))
* The command-line client now suggests the correct name if the user made a typo in a function name. [#4239](https://github.com/yandex/ClickHouse/pull/4239) ([Danila Kutenin](https://github.com/danlark1))
* Added the `Query-Id` header to the server's HTTP response. [#4231](https://github.com/yandex/ClickHouse/pull/4231) ([Mikhail](https://github.com/fandyushin))

### Experimental features:

* Added support for secondary data skipping indices of the `minmax` and `set` types for the MergeTree table family (they allow skipping whole blocks of data quickly). [#4143](https://github.com/yandex/ClickHouse/pull/4143) ([Nikita Vasilev](https://github.com/nikvas0))
* Added conversion of `CROSS JOIN` to `INNER JOIN` where possible. [#4221](https://github.com/yandex/ClickHouse/pull/4221) [#4266](https://github.com/yandex/ClickHouse/pull/4266) ([Artem Zuikov](https://github.com/4ertus2))

### Исправления ошибок:
|
||||
|
||||
* Исправлена ошибка `Not found column` для случая дублирующихся столбцов в секции `JOIN ON`. [#4279](https://github.com/yandex/ClickHouse/pull/4279) ([Artem Zuikov](https://github.com/4ertus2))
|
||||
* Команда `START REPLICATED SENDS` теперь действительно включает посылку кусков данных при репликации. [#4229](https://github.com/yandex/ClickHouse/pull/4229) ([nvartolomei](https://github.com/nvartolomei))
|
||||
* Исправлена агрегация столбцов типа `Array(LowCardinality)`. [#4055](https://github.com/yandex/ClickHouse/pull/4055) ([KochetovNicolai](https://github.com/KochetovNicolai))
|
||||
* Исправлена ошибка, приводившая к тому, что при исполнении запроса `INSERT ... SELECT ... FROM file(...)` терялась первая строчка файла, если он был в формате `CSVWithNames` или `TSVWIthNames`. [#4297](https://github.com/yandex/ClickHouse/pull/4297) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлено падение при перезагрузке внешнего словаря, если словарь недоступен. Ошибка возникла в 19.1.6. [#4188](https://github.com/yandex/ClickHouse/pull/4188) ([proller](https://github.com/proller))
|
||||
* Исправлен неверный результат `ALL JOIN`, если в правой таблице присутствуют дубликаты ключа join. [#4184](https://github.com/yandex/ClickHouse/pull/4184) ([Artem Zuikov](https://github.com/4ertus2))
|
||||
* Исправлено падение сервера при включённой опции `use_uncompressed_cache`, а также исключение о неправильном размере разжатых данных. [#4186](https://github.com/yandex/ClickHouse/pull/4186) ([alesapin](https://github.com/alesapin))
|
||||
* Исправлена ошибка, приводящая к неправильному результату сравнения больших (не помещающихся в Int16) дат при включённой настройке `compile_expressions`. [#4341](https://github.com/yandex/ClickHouse/pull/4341) ([alesapin](https://github.com/alesapin))
|
||||
* Исправлен бесконечный цикл при запросе из табличной функции `numbers(0)`. [#4280](https://github.com/yandex/ClickHouse/pull/4280) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Временно отключён pushdown предикатов в подзапрос, если он содержит `ORDER BY`. [#3890](https://github.com/yandex/ClickHouse/pull/3890) ([Winter Zhang](https://github.com/zhang2014))
|
||||
* Исправлена ошибка `Illegal instruction` при использовании функций для работы с base64 на старых CPU. Ошибка проявлялась только, если ClickHouse был скомпилирован с gcc-8. [#4275](https://github.com/yandex/ClickHouse/pull/4275) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлена ошибка `No message received` при запросах к PostgreSQL через ODBC-драйвер и TLS-соединение, исправлен segfault при использовании MySQL через ODBC-драйвер. [#4170](https://github.com/yandex/ClickHouse/pull/4170) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлен неверный результат при использовании значений типа `Date` или `DateTime` в ветвях условного оператора (функции `if`). Функция `if` теперь работает для произвольного типа значений в ветвях. [#4243](https://github.com/yandex/ClickHouse/pull/4243) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Словари с источником из локального ClickHouse теперь исполняются локально, а не используя TCP-соединение. [#4166](https://github.com/yandex/ClickHouse/pull/4166) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлено зависание запросов к таблице с движком `File` после того, как `SELECT` из этой таблицы завершился с ошибкой `No such file or directory`. [#4161](https://github.com/yandex/ClickHouse/pull/4161) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлена ошибка, из-за которой при запросе к таблице `system.tables` могло возникать исключение `table doesn't exist`. [#4313](https://github.com/yandex/ClickHouse/pull/4313) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлена ошибка, приводившая к падению `clickhouse-client` в интерактивном режиме, если успеть выйти из него во время загрузки подсказок командной строки. [#4317](https://github.com/yandex/ClickHouse/pull/4317) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлена ошибка, приводившая к неверным результатам исполнения мутаций, содержащих оператор `IN`. [#4099](https://github.com/yandex/ClickHouse/pull/4099) ([Alex Zatelepin](https://github.com/ztlpn))
|
||||
* Исправлена ошибка, из-за которой, если была создана база данных с движком `Dictionary`, все словари загружались при старте сервера, а словари с источником из локального ClickHouse не могли загрузиться. [#4255](https://github.com/yandex/ClickHouse/pull/4255) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлено повторное создание таблиц с системными логами (`system.query_log`, `system.part_log`) при остановке сервера. [#4254](https://github.com/yandex/ClickHouse/pull/4254) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлен вывод типа возвращаемого значения, а также использование блокировок в функции `joinGet`. [#4153](https://github.com/yandex/ClickHouse/pull/4153) ([Amos Bird](https://github.com/amosbird))
|
||||
* Исправлено падение сервера при использовании настройки `allow_experimental_multiple_joins_emulation`. [52de2c](https://github.com/yandex/ClickHouse/commit/52de2cd927f7b5257dd67e175f0a5560a48840d0) ([Artem Zuikov](https://github.com/4ertus2))
|
||||
* Исправлено некорректное сравнение значений типа `Date` и `DateTime`. [#4237](https://github.com/yandex/ClickHouse/pull/4237) ([valexey](https://github.com/valexey))
|
||||
* Исправлена ошибка, проявлявшаяся при fuzz-тестировании с undefined behaviour-санитайзером: добавлена проверка типов параметров для семейства функций `quantile*Weighted`. [#4145](https://github.com/yandex/ClickHouse/pull/4145) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлена редкая ошибка, из-за которой при удалении старых кусков данных может возникать ошибка `File not found`. [#4378](https://github.com/yandex/ClickHouse/pull/4378) ([alexey-milovidov](https://github.com/alexey-milovidov))
|
||||
* Исправлена установка пакета при отсутствующем файле /etc/clickhouse-server/config.xml. [#4343](https://github.com/yandex/ClickHouse/pull/4343) ([proller](https://github.com/proller))
|
||||
|
||||
### Build/Testing/Packaging Improvements:

* When installing the Debian package, the /etc/clickhouse-server/preprocessed symlink is now created taking into account the paths specified in the configuration file. [#4205](https://github.com/yandex/ClickHouse/pull/4205) ([proller](https://github.com/proller))
* Build fixes for FreeBSD. [#4225](https://github.com/yandex/ClickHouse/pull/4225) ([proller](https://github.com/proller))
* Added the ability to create, fill and drop tables in performance tests. [#4220](https://github.com/yandex/ClickHouse/pull/4220) ([alesapin](https://github.com/alesapin))
* Added a script to find duplicate include directives in source files. [#4326](https://github.com/yandex/ClickHouse/pull/4326) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added the ability to run queries by their number in performance tests. [#4264](https://github.com/yandex/ClickHouse/pull/4264) ([alesapin](https://github.com/alesapin))
* The package with debug symbols was added to the list of packages recommended by the main package. [#4274](https://github.com/yandex/ClickHouse/pull/4274) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Refactoring of the performance-test utility. Improved logging and signal handling. [#4171](https://github.com/yandex/ClickHouse/pull/4171) ([alesapin](https://github.com/alesapin))
* Documented the anonymized Yandex.Metrica dataset. [#4164](https://github.com/yandex/ClickHouse/pull/4164) ([alesapin](https://github.com/alesapin))
* Added a tool for converting data parts of tables created with the old syntax with monthly partitioning to the new format. [#4195](https://github.com/yandex/ClickHouse/pull/4195) ([Alex Zatelepin](https://github.com/ztlpn))
* Added documentation for two datasets uploaded to s3. [#4144](https://github.com/yandex/ClickHouse/pull/4144) ([alesapin](https://github.com/alesapin))
* Added a tool that builds a changelog from pull request descriptions. [#4169](https://github.com/yandex/ClickHouse/pull/4169) [#4173](https://github.com/yandex/ClickHouse/pull/4173) ([KochetovNicolai](https://github.com/KochetovNicolai))
* Added a puppet module for ClickHouse. [#4182](https://github.com/yandex/ClickHouse/pull/4182) ([Maxim Fedotov](https://github.com/MaxFedotov))
* Added documentation for several previously undocumented functions. [#4168](https://github.com/yandex/ClickHouse/pull/4168) ([Winter Zhang](https://github.com/zhang2014))
* Build fixes for ARM. [#4210](https://github.com/yandex/ClickHouse/pull/4210) [#4306](https://github.com/yandex/ClickHouse/pull/4306) [#4291](https://github.com/yandex/ClickHouse/pull/4291) ([proller](https://github.com/proller))
* Added the ability to run dictionary tests from `ctest`. [#4189](https://github.com/yandex/ClickHouse/pull/4189) ([proller](https://github.com/proller))
* `/etc/ssl` is now used as the default directory with SSL certificates. [#4167](https://github.com/yandex/ClickHouse/pull/4167) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added a check that SSE and AVX instructions are available at startup. [#4234](https://github.com/yandex/ClickHouse/pull/4234) ([Igr](https://github.com/igron99))
* The init script now waits until the server has started. [#4281](https://github.com/yandex/ClickHouse/pull/4281) ([proller](https://github.com/proller))

### Backward Incompatible Changes:

* Removed the `allow_experimental_low_cardinality_type` setting. The `LowCardinality` family of data types is ready for production use. [#4323](https://github.com/yandex/ClickHouse/pull/4323) ([alexey-milovidov](https://github.com/alexey-milovidov))
* The sizes of the mark cache and the uncompressed block cache are now reduced depending on the available amount of memory. [#4240](https://github.com/yandex/ClickHouse/pull/4240) ([Lopatin Konstantin](https://github.com/k-lopatin))
* Added the `INDEX` keyword to the `CREATE TABLE` query. A column named `index` now has to be quoted with double quotes or backticks: `` `index` `` (see the first sketch after this list). [#4143](https://github.com/yandex/ClickHouse/pull/4143) ([Nikita Vasilev](https://github.com/nikvas0))
* `sumMap` now promotes the result to a wider type instead of overflowing. If the old behaviour is needed, use the newly added `sumMapWithOverflow` function (see the second sketch after this list). [#4151](https://github.com/yandex/ClickHouse/pull/4151) ([Léo Ercolanelli](https://github.com/ercolanelli-leo))

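A sketch of the `INDEX` keyword change above; the table and column names are made up for illustration:

```sql
-- `index` is now a reserved word in CREATE TABLE (used for data skipping
-- indices), so a column with that name must be quoted:
CREATE TABLE t_with_index_column
(
    `index` UInt32,
    value String
) ENGINE = MergeTree() ORDER BY `index`;
```
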
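A sketch of the `sumMap` change above on a small inline data set: `sumMap` now sums values in a wider type, while `sumMapWithOverflow` keeps the argument type and may wrap around.

```sql
SELECT
    sumMap(k, v) AS widened,            -- summed in a wider integer type
    sumMapWithOverflow(k, v) AS wrapped -- keeps UInt8 and may overflow
FROM
(
    SELECT [1] AS k, [toUInt8(200)] AS v
    UNION ALL
    SELECT [1] AS k, [toUInt8(100)] AS v
);
```
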
### Performance Improvements:

* For queries without a `LIMIT` clause, `pdqsort` is now used instead of `std::sort`. [#4236](https://github.com/yandex/ClickHouse/pull/4236) ([Evgenii Pravda](https://github.com/kvinty))
* The server now reuses threads from a global thread pool to execute queries. This affects performance in some corner cases. [#4150](https://github.com/yandex/ClickHouse/pull/4150) ([alexey-milovidov](https://github.com/alexey-milovidov))

### Improvements:

* When an `INSERT INTO tbl VALUES (....` query (with the data in the query itself) is sent over the native protocol, the parsed insert data no longer has to be sent separately. [#4301](https://github.com/yandex/ClickHouse/pull/4301) ([alesapin](https://github.com/alesapin))
* Added AIO support for FreeBSD. [#4305](https://github.com/yandex/ClickHouse/pull/4305) ([urgordeadbeef](https://github.com/urgordeadbeef))
* The query `SELECT * FROM a JOIN b USING a, b` now returns the `a` and `b` columns only from the left table (see the first sketch after this list). [#4141](https://github.com/yandex/ClickHouse/pull/4141) ([Artem Zuikov](https://github.com/4ertus2))
* Added the `-C` command-line option for the client, which works the same way as the `-c` option. [#4232](https://github.com/yandex/ClickHouse/pull/4232) ([syominsergey](https://github.com/syominsergey))
* If the `--password` option of the command-line client is used without a value, the password is requested from standard input. [#4230](https://github.com/yandex/ClickHouse/pull/4230) ([BSD_Conqueror](https://github.com/bsd-conqueror))
* Added highlighting of metacharacters in string literals that contain `LIKE` expressions and regular expressions. [#4327](https://github.com/yandex/ClickHouse/pull/4327) ([alexey-milovidov](https://github.com/alexey-milovidov))
* An HTTP request is now cancelled if the client socket goes away. [#4213](https://github.com/yandex/ClickHouse/pull/4213) ([nvartolomei](https://github.com/nvartolomei))
* The server now sends Progress packets from time to time to keep the connection alive. [#4215](https://github.com/yandex/ClickHouse/pull/4215) ([Ivan](https://github.com/abyss7))
* Slightly improved the message explaining why an OPTIMIZE query cannot be executed (when the `optimize_throw_if_noop` setting is enabled); see the third sketch after this list. [#4294](https://github.com/yandex/ClickHouse/pull/4294) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added support for the `--version` option for `clickhouse-server`. [#4251](https://github.com/yandex/ClickHouse/pull/4251) ([Lopatin Konstantin](https://github.com/k-lopatin))
* Added support for the `--help/-h` option for `clickhouse-server`. [#4233](https://github.com/yandex/ClickHouse/pull/4233) ([Yuriy Baranov](https://github.com/yurriy))
* Added support for scalar subqueries that return an aggregate function state (see the second sketch after this list). [#4348](https://github.com/yandex/ClickHouse/pull/4348) ([Nikolai Kochetov](https://github.com/KochetovNicolai))
* Reduced the time needed to wait for server shutdown and for `ALTER` queries to finish. [#4372](https://github.com/yandex/ClickHouse/pull/4372) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Added information about the value of the `replicated_can_become_leader` setting to the `system.replicas` table. Added logging if the replica is not going to try to become the leader. [#4379](https://github.com/yandex/ClickHouse/pull/4379) ([Alex Zatelepin](https://github.com/ztlpn))

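A sketch of the `JOIN ... USING` behaviour described above, with hypothetical tables `t1` and `t2`:

```sql
CREATE TABLE t1 (a UInt32, b UInt32, x String) ENGINE = Memory;
CREATE TABLE t2 (a UInt32, b UInt32, y String) ENGINE = Memory;

-- `a` and `b` now appear once in the result (taken from the left table)
-- instead of being returned from both sides:
SELECT * FROM t1 JOIN t2 USING (a, b);
```
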
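A sketch of a scalar subquery returning an aggregate function state (the improvement above); the state produced by `uniqState` is finished later with `uniqMerge`:

```sql
SELECT uniqMerge(s) AS result
FROM
(
    -- The scalar subquery yields a single AggregateFunction(uniq, UInt64) value.
    SELECT (SELECT uniqState(number) FROM numbers(1000)) AS s
);
```
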
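For the `OPTIMIZE` message improvement above, a hypothetical table `t` and the setting can be combined as follows:

```sql
SET optimize_throw_if_noop = 1;

-- If there is nothing to merge, the query now fails with a more descriptive
-- explanation instead of silently doing nothing:
OPTIMIZE TABLE t;
```
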
## ClickHouse release 19.1.9, 2019-02-21

### Bug Fixes:

* Fixed backward incompatibility with older versions caused by an incorrect implementation of the `send_logs_level` setting. [#4445](https://github.com/yandex/ClickHouse/pull/4445) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed backward incompatibility of the `remote` table function introduced by the addition of column comments. [#4446](https://github.com/yandex/ClickHouse/pull/4446) ([alexey-milovidov](https://github.com/alexey-milovidov))

## ClickHouse release 19.1.8, 2019-02-16

### Bug Fixes:

* Fixed package installation when the /etc/clickhouse-server/config.xml file is missing. [#4343](https://github.com/yandex/ClickHouse/pull/4343) ([proller](https://github.com/proller))

## ClickHouse release 19.1.7, 2019-02-15

### Bug Fixes:

* Fixed return type deduction and the use of locks in the `joinGet` function. [#4153](https://github.com/yandex/ClickHouse/pull/4153) ([Amos Bird](https://github.com/amosbird))
* Fixed repeated creation of tables with system logs (`system.query_log`, `system.part_log`) on server shutdown. [#4254](https://github.com/yandex/ClickHouse/pull/4254) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an error where, if a database with the `Dictionary` engine had been created, all dictionaries were loaded at server startup, and dictionaries with a local ClickHouse source could not be loaded. [#4255](https://github.com/yandex/ClickHouse/pull/4255) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an error that led to incorrect results when executing mutations containing the `IN` operator. [#4099](https://github.com/yandex/ClickHouse/pull/4099) ([Alex Zatelepin](https://github.com/ztlpn))
* Fixed a crash of `clickhouse-client` in interactive mode that happened when exiting the client while command-line suggestions were still loading. [#4317](https://github.com/yandex/ClickHouse/pull/4317) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an error where querying the `system.tables` table could raise a `table doesn't exist` exception. [#4313](https://github.com/yandex/ClickHouse/pull/4313) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed hanging of queries to a table with the `File` engine after a `SELECT` from that table failed with a `No such file or directory` error. [#4161](https://github.com/yandex/ClickHouse/pull/4161) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Dictionaries with a local ClickHouse source are now executed locally instead of over a TCP connection. [#4166](https://github.com/yandex/ClickHouse/pull/4166) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed the `No message received` error when querying PostgreSQL through an ODBC driver over a TLS connection, and fixed a segfault when using MySQL through an ODBC driver. [#4170](https://github.com/yandex/ClickHouse/pull/4170) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Temporarily disabled predicate pushdown into a subquery if it contains `ORDER BY`. [#3890](https://github.com/yandex/ClickHouse/pull/3890) ([Winter Zhang](https://github.com/zhang2014))
* Fixed an infinite loop when selecting from the `numbers(0)` table function. [#4280](https://github.com/yandex/ClickHouse/pull/4280) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed an error that led to incorrect comparison of large dates (that do not fit into Int16) when the `compile_expressions` setting is enabled. [#4341](https://github.com/yandex/ClickHouse/pull/4341) ([alesapin](https://github.com/alesapin))
* Fixed a server crash when the `uncompressed_cache` option is enabled, as well as an exception about a wrong uncompressed data size. [#4186](https://github.com/yandex/ClickHouse/pull/4186) ([alesapin](https://github.com/alesapin))
* Fixed an incorrect `ALL JOIN` result when the right table contains duplicate join keys (see the second sketch after this list). [#4184](https://github.com/yandex/ClickHouse/pull/4184) ([Artem Zuikov](https://github.com/4ertus2))
* Fixed an error where the first line of a file was lost during `INSERT ... SELECT ... FROM file(...)` if the file was in the `CSVWithNames` or `TSVWithNames` format (see the first sketch after this list). [#4297](https://github.com/yandex/ClickHouse/pull/4297) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed aggregation of `Array(LowCardinality)` columns. [#4055](https://github.com/yandex/ClickHouse/pull/4055) ([KochetovNicolai](https://github.com/KochetovNicolai))
* When installing the Debian package, the /etc/clickhouse-server/preprocessed symlink is now created taking into account the paths specified in the configuration file. [#4205](https://github.com/yandex/ClickHouse/pull/4205) ([proller](https://github.com/proller))
* Fixed an error found by fuzz testing under the undefined behaviour sanitizer: added a parameter type check for the `quantile*Weighted` family of functions. [#4145](https://github.com/yandex/ClickHouse/pull/4145) ([alexey-milovidov](https://github.com/alexey-milovidov))
* The `START REPLICATED SENDS` command now actually starts sending data parts during replication. [#4229](https://github.com/yandex/ClickHouse/pull/4229) ([nvartolomei](https://github.com/nvartolomei))
* Fixed the `Not found column` error for duplicate columns in the `JOIN ON` section. [#4279](https://github.com/yandex/ClickHouse/pull/4279) ([Artem Zuikov](https://github.com/4ertus2))
* `/etc/ssl` is now used as the default directory with SSL certificates. [#4167](https://github.com/yandex/ClickHouse/pull/4167) ([alexey-milovidov](https://github.com/alexey-milovidov))
* Fixed a crash when reloading an external dictionary if the dictionary is unavailable. This bug appeared in 19.1.6. [#4188](https://github.com/yandex/ClickHouse/pull/4188) ([proller](https://github.com/proller))
* Fixed incorrect comparison of `Date` and `DateTime` values. [#4237](https://github.com/yandex/ClickHouse/pull/4237) ([valexey](https://github.com/valexey))
* Fixed incorrect results when `Date` or `DateTime` values are used in the branches of the conditional operator (the `if` function). The `if` function now works with arbitrary value types in its branches. [#4243](https://github.com/yandex/ClickHouse/pull/4243) ([alexey-milovidov](https://github.com/alexey-milovidov))

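A sketch of the `file(...)` fix above; `data.csv` and `target_table` are hypothetical, and the path is resolved relative to the directory configured for the `file` table function:

```sql
-- The header line of the CSV provides the column names and is no longer
-- consumed as a data row:
INSERT INTO target_table
SELECT * FROM file('data.csv', 'CSVWithNames', 'id UInt64, name String');
```
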
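A sketch of the `ALL JOIN` fix above, using inline subqueries instead of real tables:

```sql
-- The right-hand side contains the key 1 twice, so ALL JOIN returns one row
-- per matching pair (two rows here):
SELECT *
FROM (SELECT 1 AS k, 'left' AS side) AS l
ALL INNER JOIN
(
    SELECT 1 AS k, 'r1' AS tag
    UNION ALL
    SELECT 1 AS k, 'r2' AS tag
) AS r USING (k);
```
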
## ClickHouse release 19.1.6, 2019-01-24

### New Features:

@ -125,7 +302,7 @@

### New Features:

* Calculation of `DEFAULT` expressions for fields missing when loading data in semi-structured input formats (`JSONEachRow`, `TSKV`). [#3555](https://github.com/yandex/ClickHouse/pull/3555)
* Calculation of `DEFAULT` expressions for fields missing when loading data in semi-structured input formats (`JSONEachRow`, `TSKV`); this requires enabling the `insert_sample_with_metadata` query setting (see the first sketch below). [#3555](https://github.com/yandex/ClickHouse/pull/3555)
* Added the `MODIFY ORDER BY` action to the `ALTER TABLE` query for changing the sorting key when adding or removing a table column. This is useful for tables in the `MergeTree` family that perform additional work when merging according to this sorting key, such as `SummingMergeTree`, `AggregatingMergeTree`, and so on (see the second sketch below). [#3581](https://github.com/yandex/ClickHouse/pull/3581) [#3755](https://github.com/yandex/ClickHouse/pull/3755)
* For tables in the `MergeTree` family, it is now possible to specify a different sorting key (`ORDER BY`) and index (`PRIMARY KEY`). The sorting key can be longer than the index. [#3581](https://github.com/yandex/ClickHouse/pull/3581)
* Added the `hdfs` table function and the `HDFS` table engine for importing and exporting data to HDFS. [chenxing-xc](https://github.com/yandex/ClickHouse/pull/3617)

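A sketch of the `DEFAULT` calculation for semi-structured formats described in this hunk; the table and data are made up, and the behaviour is assumed to be gated by the `insert_sample_with_metadata` setting named above:

```sql
CREATE TABLE events
(
    ts DateTime,
    user UInt64,
    source String DEFAULT 'unknown'
) ENGINE = MergeTree() ORDER BY ts;

SET insert_sample_with_metadata = 1;

-- `source` is absent from the JSON row, so its DEFAULT expression is applied:
INSERT INTO events FORMAT JSONEachRow {"ts": "2019-01-01 00:00:00", "user": 1}
```
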
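A sketch combining the two `MergeTree` features above: a sorting key longer than the primary key, and `MODIFY ORDER BY` when a column is added. Table and column names are illustrative.

```sql
CREATE TABLE metrics
(
    date Date,
    id UInt64,
    metric String,
    value Float64
) ENGINE = SummingMergeTree()
ORDER BY (date, id, metric)
PRIMARY KEY (date, id);

-- A newly added column can be appended to the sorting key in the same ALTER:
ALTER TABLE metrics
    ADD COLUMN host String,
    MODIFY ORDER BY (date, id, metric, host);
```
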
105
CMakeLists.txt
105
CMakeLists.txt
@ -50,6 +50,7 @@ string(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UC)
|
||||
message (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
|
||||
|
||||
set (CMAKE_CONFIGURATION_TYPES "RelWithDebInfo;Debug;Release;MinSizeRel" CACHE STRING "" FORCE)
|
||||
set (CMAKE_DEBUG_POSTFIX "d" CACHE STRING "Generate debug library name with a postfix.") # To be consistent with CMakeLists from contrib libs.
|
||||
|
||||
option (USE_STATIC_LIBRARIES "Set to FALSE to use shared libraries" ON)
|
||||
option (MAKE_STATIC_LIBRARIES "Set to FALSE to make shared libraries" ${USE_STATIC_LIBRARIES})
|
||||
@ -98,10 +99,6 @@ if (CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64")
|
||||
|
||||
if (OS_LINUX AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND CMAKE_VERSION VERSION_GREATER "3.9.0")
|
||||
option (GLIBC_COMPATIBILITY "Set to TRUE to enable compatibility with older glibc libraries. Only for x86_64, Linux. Implies USE_INTERNAL_MEMCPY." ON)
|
||||
if (GLIBC_COMPATIBILITY)
|
||||
message (STATUS "Some symbols from glibc will be replaced for compatibility")
|
||||
link_libraries(glibc-compatibility)
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
@ -177,6 +174,60 @@ set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 -g3 -ggdb3
|
||||
|
||||
include (cmake/use_libcxx.cmake)
|
||||
|
||||
|
||||
# Set standard, system and compiler libraries explicitly.
|
||||
# This is intended for more control of what we are linking.
|
||||
|
||||
set (DEFAULT_LIBS "")
|
||||
if (OS_LINUX AND NOT UNBUNDLED)
|
||||
# Note: this probably has no effect, but I'm not an expert in CMake.
|
||||
set (CMAKE_C_IMPLICIT_LINK_LIBRARIES "")
|
||||
set (CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "")
|
||||
|
||||
# Disable default linked libraries.
|
||||
set (DEFAULT_LIBS "-nodefaultlibs")
|
||||
|
||||
# Add C++ libraries.
|
||||
#
|
||||
# This consists of:
|
||||
# - C++ standard library (like implementation of std::string);
|
||||
# - C++ ABI implementation (functions for exceptions like __cxa_throw, RTTI, etc);
|
||||
# - functions for internal implementation of exception handling (stack unwinding based on DWARF info; TODO replace with bundled libunwind);
|
||||
# - compiler builtins (example: functions for implementation of __int128 operations);
|
||||
#
|
||||
# There are two variants of C++ library: libc++ (from LLVM compiler infrastructure) and libstdc++ (from GCC).
|
||||
if (USE_LIBCXX)
|
||||
set (BUILTINS_LIB_PATH "")
|
||||
if (COMPILER_CLANG)
|
||||
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --print-file-name=libclang_rt.builtins-${CMAKE_SYSTEM_PROCESSOR}.a OUTPUT_VARIABLE BUILTINS_LIB_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
endif ()
|
||||
|
||||
set (DEFAULT_LIBS "${DEFAULT_LIBS} -Wl,-Bstatic -lc++ -lc++abi -lgcc_eh ${BUILTINS_LIB_PATH} -Wl,-Bdynamic")
|
||||
else ()
|
||||
set (DEFAULT_LIBS "${DEFAULT_LIBS} -Wl,-Bstatic -lstdc++ -lgcc_eh -lgcc -Wl,-Bdynamic")
|
||||
endif ()
|
||||
|
||||
# Linking with GLIBC prevents portability of binaries to older systems.
|
||||
# We overcome this behaviour by statically linking with our own implementation of all new symbols (that don't exist in older Libc or have infamous "symbol versioning").
|
||||
# The order of linking is important: 'glibc-compatibility' must be before libc but after all other libraries.
|
||||
if (GLIBC_COMPATIBILITY)
|
||||
message (STATUS "Some symbols from glibc will be replaced for compatibility")
|
||||
|
||||
string (TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UC)
|
||||
set (CMAKE_POSTFIX_VARIABLE "CMAKE_${CMAKE_BUILD_TYPE_UC}_POSTFIX")
|
||||
|
||||
set (DEFAULT_LIBS "${DEFAULT_LIBS} libs/libglibc-compatibility/libglibc-compatibility${${CMAKE_POSTFIX_VARIABLE}}.a")
|
||||
endif ()
|
||||
|
||||
# Add Libc. GLIBC is actually a collection of interdependent libraries.
|
||||
set (DEFAULT_LIBS "${DEFAULT_LIBS} -lrt -ldl -lpthread -lm -lc")
|
||||
|
||||
# Note: we'd rather use Musl libc library, but it's little bit more difficult to use.
|
||||
|
||||
message(STATUS "Default libraries: ${DEFAULT_LIBS}")
|
||||
endif ()
|
||||
|
||||
|
||||
if (NOT MAKE_STATIC_LIBRARIES)
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
|
||||
endif ()
|
||||
@ -224,6 +275,7 @@ endif ()
|
||||
message (STATUS "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ; USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES} MAKE_STATIC_LIBRARIES=${MAKE_STATIC_LIBRARIES} SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES} UNBUNDLED=${UNBUNDLED} CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")
|
||||
|
||||
include(GNUInstallDirs)
|
||||
include (cmake/find_contrib_lib.cmake)
|
||||
|
||||
include (cmake/find_ssl.cmake)
|
||||
include (cmake/lib_name.cmake)
|
||||
@ -252,21 +304,21 @@ if (NOT USE_CPUID)
|
||||
endif()
|
||||
include (cmake/find_libgsasl.cmake)
|
||||
include (cmake/find_libxml2.cmake)
|
||||
include (cmake/find_brotli.cmake)
|
||||
include (cmake/find_protobuf.cmake)
|
||||
include (cmake/find_pdqsort.cmake)
|
||||
include (cmake/find_hdfs3.cmake)
|
||||
include (cmake/find_hdfs3.cmake) # uses protobuf
|
||||
include (cmake/find_consistent-hashing.cmake)
|
||||
include (cmake/find_base64.cmake)
|
||||
if (ENABLE_TESTS)
|
||||
include (cmake/find_gtest.cmake)
|
||||
endif ()
|
||||
|
||||
include (cmake/find_contrib_lib.cmake)
|
||||
find_contrib_lib(cityhash)
|
||||
find_contrib_lib(farmhash)
|
||||
find_contrib_lib(metrohash)
|
||||
find_contrib_lib(btrie)
|
||||
find_contrib_lib(double-conversion)
|
||||
include (cmake/find_parquet.cmake)
|
||||
if (ENABLE_TESTS)
|
||||
include (cmake/find_gtest.cmake)
|
||||
endif ()
|
||||
|
||||
# Need to process before "contrib" dir:
|
||||
include (libs/libcommon/cmake/find_gperftools.cmake)
|
||||
@ -283,3 +335,36 @@ add_subdirectory (utils)
|
||||
add_subdirectory (dbms)
|
||||
|
||||
include (cmake/print_include_directories.cmake)
|
||||
|
||||
|
||||
if (DEFAULT_LIBS)
|
||||
# Add default libs to all targets as the last dependency.
|
||||
# I have found no better way to specify default libs in CMake that will appear a single time, in a specific order, at the end of the linker arguments.
|
||||
|
||||
function(add_default_libs target_name)
|
||||
if (TARGET ${target_name})
|
||||
# message(STATUS "Has target ${target_name}")
|
||||
set_property(TARGET ${target_name} APPEND PROPERTY LINK_LIBRARIES "${DEFAULT_LIBS}")
|
||||
set_property(TARGET ${target_name} APPEND PROPERTY INTERFACE_LINK_LIBRARIES "${DEFAULT_LIBS}")
|
||||
if (GLIBC_COMPATIBILITY)
|
||||
add_dependencies(${target_name} glibc-compatibility)
|
||||
endif ()
|
||||
endif ()
|
||||
endfunction ()
|
||||
|
||||
add_default_libs(ltdl)
|
||||
add_default_libs(zlibstatic)
|
||||
add_default_libs(jemalloc)
|
||||
add_default_libs(unwind)
|
||||
add_default_libs(memcpy)
|
||||
add_default_libs(Foundation)
|
||||
add_default_libs(common)
|
||||
add_default_libs(gtest)
|
||||
add_default_libs(lz4)
|
||||
add_default_libs(zstd)
|
||||
add_default_libs(snappy)
|
||||
add_default_libs(arrow)
|
||||
add_default_libs(protoc)
|
||||
add_default_libs(thrift_static)
|
||||
add_default_libs(boost_regex_internal)
|
||||
endif ()
|
||||
|
@ -13,5 +13,4 @@ ClickHouse is an open-source column-oriented database management system that all

## Upcoming Events

* [ClickHouse Community Meetup](https://www.eventbrite.com/e/meetup-clickhouse-in-the-wild-deployment-success-stories-registration-55305051899) in San Francisco on February 19.
* [ClickHouse Community Meetup](https://www.eventbrite.com/e/clickhouse-meetup-in-madrid-registration-55376746339) in Madrid on April 2.

159
cmake/Modules/FindArrow.cmake
Normal file
159
cmake/Modules/FindArrow.cmake
Normal file
@ -0,0 +1,159 @@
|
||||
# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindArrow.cmake
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# - Find ARROW (arrow/api.h, libarrow.a, libarrow.so)
|
||||
# This module defines
|
||||
# ARROW_INCLUDE_DIR, directory containing headers
|
||||
# ARROW_LIBS, directory containing arrow libraries
|
||||
# ARROW_STATIC_LIB, path to libarrow.a
|
||||
# ARROW_SHARED_LIB, path to libarrow's shared library
|
||||
# ARROW_SHARED_IMP_LIB, path to libarrow's import library (MSVC only)
|
||||
# ARROW_FOUND, whether arrow has been found
|
||||
|
||||
include(FindPkgConfig)
|
||||
include(GNUInstallDirs)
|
||||
|
||||
if ("$ENV{ARROW_HOME}" STREQUAL "")
|
||||
pkg_check_modules(ARROW arrow)
|
||||
if (ARROW_FOUND)
|
||||
pkg_get_variable(ARROW_SO_VERSION arrow so_version)
|
||||
set(ARROW_ABI_VERSION ${ARROW_SO_VERSION})
|
||||
message(STATUS "Arrow SO and ABI version: ${ARROW_SO_VERSION}")
|
||||
pkg_get_variable(ARROW_FULL_SO_VERSION arrow full_so_version)
|
||||
message(STATUS "Arrow full SO version: ${ARROW_FULL_SO_VERSION}")
|
||||
if ("${ARROW_INCLUDE_DIRS}" STREQUAL "")
|
||||
set(ARROW_INCLUDE_DIRS "/usr/${CMAKE_INSTALL_INCLUDEDIR}")
|
||||
endif()
|
||||
if ("${ARROW_LIBRARY_DIRS}" STREQUAL "")
|
||||
set(ARROW_LIBRARY_DIRS "/usr/${CMAKE_INSTALL_LIBDIR}")
|
||||
if (EXISTS "/etc/debian_version" AND CMAKE_LIBRARY_ARCHITECTURE)
|
||||
set(ARROW_LIBRARY_DIRS
|
||||
"${ARROW_LIBRARY_DIRS}/${CMAKE_LIBRARY_ARCHITECTURE}")
|
||||
endif()
|
||||
endif()
|
||||
set(ARROW_INCLUDE_DIR ${ARROW_INCLUDE_DIRS})
|
||||
set(ARROW_LIBS ${ARROW_LIBRARY_DIRS})
|
||||
set(ARROW_SEARCH_LIB_PATH ${ARROW_LIBRARY_DIRS})
|
||||
endif()
|
||||
else()
|
||||
set(ARROW_HOME "$ENV{ARROW_HOME}")
|
||||
|
||||
set(ARROW_SEARCH_HEADER_PATHS
|
||||
${ARROW_HOME}/include
|
||||
)
|
||||
|
||||
set(ARROW_SEARCH_LIB_PATH
|
||||
${ARROW_HOME}/lib
|
||||
)
|
||||
|
||||
find_path(ARROW_INCLUDE_DIR arrow/array.h PATHS
|
||||
${ARROW_SEARCH_HEADER_PATHS}
|
||||
# make sure we don't accidentally pick up a different version
|
||||
NO_DEFAULT_PATH
|
||||
)
|
||||
endif()
|
||||
|
||||
find_library(ARROW_LIB_PATH NAMES arrow
|
||||
PATHS
|
||||
${ARROW_SEARCH_LIB_PATH}
|
||||
NO_DEFAULT_PATH)
|
||||
get_filename_component(ARROW_LIBS ${ARROW_LIB_PATH} DIRECTORY)
|
||||
|
||||
find_library(ARROW_PYTHON_LIB_PATH NAMES arrow_python
|
||||
PATHS
|
||||
${ARROW_SEARCH_LIB_PATH}
|
||||
NO_DEFAULT_PATH)
|
||||
get_filename_component(ARROW_PYTHON_LIBS ${ARROW_PYTHON_LIB_PATH} DIRECTORY)
|
||||
|
||||
if (MSVC)
|
||||
SET(CMAKE_FIND_LIBRARY_SUFFIXES ".lib" ".dll")
|
||||
|
||||
if (MSVC AND NOT DEFINED ARROW_MSVC_STATIC_LIB_SUFFIX)
|
||||
set(ARROW_MSVC_STATIC_LIB_SUFFIX "_static")
|
||||
endif()
|
||||
|
||||
find_library(ARROW_SHARED_LIBRARIES NAMES arrow
|
||||
PATHS ${ARROW_HOME} NO_DEFAULT_PATH
|
||||
PATH_SUFFIXES "bin" )
|
||||
|
||||
find_library(ARROW_PYTHON_SHARED_LIBRARIES NAMES arrow_python
|
||||
PATHS ${ARROW_HOME} NO_DEFAULT_PATH
|
||||
PATH_SUFFIXES "bin" )
|
||||
get_filename_component(ARROW_SHARED_LIBS ${ARROW_SHARED_LIBRARIES} PATH )
|
||||
get_filename_component(ARROW_PYTHON_SHARED_LIBS ${ARROW_PYTHON_SHARED_LIBRARIES} PATH )
|
||||
endif ()
|
||||
|
||||
if (ARROW_INCLUDE_DIR AND ARROW_LIBS)
|
||||
set(ARROW_FOUND TRUE)
|
||||
set(ARROW_LIB_NAME arrow)
|
||||
set(ARROW_PYTHON_LIB_NAME arrow_python)
|
||||
if (MSVC)
|
||||
set(ARROW_STATIC_LIB ${ARROW_LIBS}/${ARROW_LIB_NAME}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX})
|
||||
set(ARROW_PYTHON_STATIC_LIB ${ARROW_PYTHON_LIBS}/${ARROW_PYTHON_LIB_NAME}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX})
|
||||
set(ARROW_SHARED_LIB ${ARROW_SHARED_LIBS}/${ARROW_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
|
||||
set(ARROW_PYTHON_SHARED_LIB ${ARROW_PYTHON_SHARED_LIBS}/${ARROW_PYTHON_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
|
||||
set(ARROW_SHARED_IMP_LIB ${ARROW_LIBS}/${ARROW_LIB_NAME}.lib)
|
||||
set(ARROW_PYTHON_SHARED_IMP_LIB ${ARROW_PYTHON_LIBS}/${ARROW_PYTHON_LIB_NAME}.lib)
|
||||
else()
|
||||
set(ARROW_STATIC_LIB ${ARROW_LIBS}/lib${ARROW_LIB_NAME}.a)
|
||||
set(ARROW_PYTHON_STATIC_LIB ${ARROW_LIBS}/lib${ARROW_PYTHON_LIB_NAME}.a)
|
||||
|
||||
set(ARROW_SHARED_LIB ${ARROW_LIBS}/lib${ARROW_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
|
||||
set(ARROW_PYTHON_SHARED_LIB ${ARROW_LIBS}/lib${ARROW_PYTHON_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (ARROW_FOUND)
|
||||
if (NOT Arrow_FIND_QUIETLY)
|
||||
message(STATUS "Found the Arrow core library: ${ARROW_LIB_PATH}")
|
||||
message(STATUS "Found the Arrow Python library: ${ARROW_PYTHON_LIB_PATH}")
|
||||
endif ()
|
||||
else ()
|
||||
if (NOT Arrow_FIND_QUIETLY)
|
||||
set(ARROW_ERR_MSG "Could not find the Arrow library. Looked for headers")
|
||||
set(ARROW_ERR_MSG "${ARROW_ERR_MSG} in ${ARROW_SEARCH_HEADER_PATHS}, and for libs")
|
||||
set(ARROW_ERR_MSG "${ARROW_ERR_MSG} in ${ARROW_SEARCH_LIB_PATH}")
|
||||
if (Arrow_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "${ARROW_ERR_MSG}")
|
||||
else (Arrow_FIND_REQUIRED)
|
||||
message(STATUS "${ARROW_ERR_MSG}")
|
||||
endif (Arrow_FIND_REQUIRED)
|
||||
endif ()
|
||||
set(ARROW_FOUND FALSE)
|
||||
endif ()
|
||||
|
||||
if (MSVC)
|
||||
mark_as_advanced(
|
||||
ARROW_INCLUDE_DIR
|
||||
ARROW_STATIC_LIB
|
||||
ARROW_SHARED_LIB
|
||||
ARROW_SHARED_IMP_LIB
|
||||
ARROW_PYTHON_STATIC_LIB
|
||||
ARROW_PYTHON_SHARED_LIB
|
||||
ARROW_PYTHON_SHARED_IMP_LIB
|
||||
)
|
||||
else()
|
||||
mark_as_advanced(
|
||||
ARROW_INCLUDE_DIR
|
||||
ARROW_STATIC_LIB
|
||||
ARROW_SHARED_LIB
|
||||
ARROW_PYTHON_STATIC_LIB
|
||||
ARROW_PYTHON_SHARED_LIB
|
||||
)
|
||||
endif()
|
147
cmake/Modules/FindParquet.cmake
Normal file
147
cmake/Modules/FindParquet.cmake
Normal file
@ -0,0 +1,147 @@
|
||||
# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindParquet.cmake
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# - Find PARQUET (parquet/parquet.h, libparquet.a, libparquet.so)
|
||||
# This module defines
|
||||
# PARQUET_INCLUDE_DIR, directory containing headers
|
||||
# PARQUET_LIBS, directory containing parquet libraries
|
||||
# PARQUET_STATIC_LIB, path to libparquet.a
|
||||
# PARQUET_SHARED_LIB, path to libparquet's shared library
|
||||
# PARQUET_SHARED_IMP_LIB, path to libparquet's import library (MSVC only)
|
||||
# PARQUET_FOUND, whether parquet has been found
|
||||
|
||||
include(FindPkgConfig)
|
||||
|
||||
if(NOT "$ENV{PARQUET_HOME}" STREQUAL "")
|
||||
set(PARQUET_HOME "$ENV{PARQUET_HOME}")
|
||||
endif()
|
||||
|
||||
if (MSVC)
|
||||
SET(CMAKE_FIND_LIBRARY_SUFFIXES ".lib" ".dll")
|
||||
|
||||
if (MSVC AND NOT DEFINED PARQUET_MSVC_STATIC_LIB_SUFFIX)
|
||||
set(PARQUET_MSVC_STATIC_LIB_SUFFIX "_static")
|
||||
endif()
|
||||
|
||||
find_library(PARQUET_SHARED_LIBRARIES NAMES parquet
|
||||
PATHS ${PARQUET_HOME} NO_DEFAULT_PATH
|
||||
PATH_SUFFIXES "bin" )
|
||||
|
||||
get_filename_component(PARQUET_SHARED_LIBS ${PARQUET_SHARED_LIBRARIES} PATH )
|
||||
endif ()
|
||||
|
||||
if(PARQUET_HOME)
|
||||
set(PARQUET_SEARCH_HEADER_PATHS
|
||||
${PARQUET_HOME}/include
|
||||
)
|
||||
set(PARQUET_SEARCH_LIB_PATH
|
||||
${PARQUET_HOME}/lib
|
||||
)
|
||||
find_path(PARQUET_INCLUDE_DIR parquet/api/reader.h PATHS
|
||||
${PARQUET_SEARCH_HEADER_PATHS}
|
||||
# make sure we don't accidentally pick up a different version
|
||||
NO_DEFAULT_PATH
|
||||
)
|
||||
find_library(PARQUET_LIBRARIES NAMES parquet
|
||||
PATHS ${PARQUET_HOME} NO_DEFAULT_PATH
|
||||
PATH_SUFFIXES "lib")
|
||||
get_filename_component(PARQUET_LIBS ${PARQUET_LIBRARIES} PATH )
|
||||
|
||||
# Try to autodiscover the Parquet ABI version
|
||||
get_filename_component(PARQUET_LIB_REALPATH ${PARQUET_LIBRARIES} REALPATH)
|
||||
get_filename_component(PARQUET_EXT_REALPATH ${PARQUET_LIB_REALPATH} EXT)
|
||||
string(REGEX MATCH ".([0-9]+.[0-9]+.[0-9]+)" HAS_ABI_VERSION ${PARQUET_EXT_REALPATH})
|
||||
if (HAS_ABI_VERSION)
|
||||
if (APPLE)
|
||||
string(REGEX REPLACE ".([0-9]+.[0-9]+.[0-9]+).dylib" "\\1" PARQUET_ABI_VERSION ${PARQUET_EXT_REALPATH})
|
||||
else()
|
||||
string(REGEX REPLACE ".so.([0-9]+.[0-9]+.[0-9]+)" "\\1" PARQUET_ABI_VERSION ${PARQUET_EXT_REALPATH})
|
||||
endif()
|
||||
string(REGEX REPLACE "([0-9]+).[0-9]+.[0-9]+" "\\1" PARQUET_SO_VERSION ${PARQUET_ABI_VERSION})
|
||||
else()
|
||||
set(PARQUET_ABI_VERSION "1.0.0")
|
||||
set(PARQUET_SO_VERSION "1")
|
||||
endif()
|
||||
else()
|
||||
pkg_check_modules(PARQUET parquet)
|
||||
if (PARQUET_FOUND)
|
||||
pkg_get_variable(PARQUET_ABI_VERSION parquet abi_version)
|
||||
message(STATUS "Parquet C++ ABI version: ${PARQUET_ABI_VERSION}")
|
||||
pkg_get_variable(PARQUET_SO_VERSION parquet so_version)
|
||||
message(STATUS "Parquet C++ SO version: ${PARQUET_SO_VERSION}")
|
||||
set(PARQUET_INCLUDE_DIR ${PARQUET_INCLUDE_DIRS})
|
||||
set(PARQUET_LIBS ${PARQUET_LIBRARY_DIRS})
|
||||
set(PARQUET_SEARCH_LIB_PATH ${PARQUET_LIBRARY_DIRS})
|
||||
message(STATUS "Searching for parquet libs in: ${PARQUET_SEARCH_LIB_PATH}")
|
||||
find_library(PARQUET_LIBRARIES NAMES parquet
|
||||
PATHS ${PARQUET_SEARCH_LIB_PATH} NO_DEFAULT_PATH)
|
||||
else()
|
||||
find_path(PARQUET_INCLUDE_DIR NAMES parquet/api/reader.h )
|
||||
find_library(PARQUET_LIBRARIES NAMES parquet)
|
||||
get_filename_component(PARQUET_LIBS ${PARQUET_LIBRARIES} PATH )
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (PARQUET_INCLUDE_DIR AND PARQUET_LIBRARIES)
|
||||
set(PARQUET_FOUND TRUE)
|
||||
set(PARQUET_LIB_NAME parquet)
|
||||
if (MSVC)
|
||||
set(PARQUET_STATIC_LIB "${PARQUET_LIBS}/${PARQUET_LIB_NAME}${PARQUET_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}")
|
||||
set(PARQUET_SHARED_LIB "${PARQUET_SHARED_LIBS}/${PARQUET_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}")
|
||||
set(PARQUET_SHARED_IMP_LIB "${PARQUET_LIBS}/${PARQUET_LIB_NAME}.lib")
|
||||
else()
|
||||
set(PARQUET_STATIC_LIB ${PARQUET_LIBS}/${CMAKE_STATIC_LIBRARY_PREFIX}${PARQUET_LIB_NAME}.a)
|
||||
set(PARQUET_SHARED_LIB ${PARQUET_LIBS}/${CMAKE_SHARED_LIBRARY_PREFIX}${PARQUET_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
|
||||
endif()
|
||||
else ()
|
||||
set(PARQUET_FOUND FALSE)
|
||||
endif ()
|
||||
|
||||
if (PARQUET_FOUND)
|
||||
if (NOT Parquet_FIND_QUIETLY)
|
||||
message(STATUS "Found the Parquet library: ${PARQUET_LIBRARIES}")
|
||||
endif ()
|
||||
else ()
|
||||
if (NOT Parquet_FIND_QUIETLY)
|
||||
if (NOT PARQUET_FOUND)
|
||||
set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} Could not find the parquet library.")
|
||||
endif()
|
||||
|
||||
set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} Looked in ")
|
||||
if ( _parquet_roots )
|
||||
set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} in ${_parquet_roots}.")
|
||||
else ()
|
||||
set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} system search paths.")
|
||||
endif ()
|
||||
if (Parquet_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "${PARQUET_ERR_MSG}")
|
||||
else (Parquet_FIND_REQUIRED)
|
||||
message(STATUS "${PARQUET_ERR_MSG}")
|
||||
endif (Parquet_FIND_REQUIRED)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
mark_as_advanced(
|
||||
PARQUET_FOUND
|
||||
PARQUET_INCLUDE_DIR
|
||||
PARQUET_LIBS
|
||||
PARQUET_LIBRARIES
|
||||
PARQUET_STATIC_LIB
|
||||
PARQUET_SHARED_LIB
|
||||
)
|
@ -9,7 +9,7 @@ endif ()
|
||||
if (NOT USE_INTERNAL_BOOST_LIBRARY)
|
||||
set (Boost_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES})
|
||||
set (BOOST_ROOT "/usr/local")
|
||||
find_package (Boost 1.60 COMPONENTS program_options system filesystem thread)
|
||||
find_package (Boost 1.60 COMPONENTS program_options system filesystem thread regex)
|
||||
# incomplete, no include search, who use it?
|
||||
if (NOT Boost_FOUND)
|
||||
# # Try to find manually.
|
||||
@ -29,9 +29,12 @@ if (NOT Boost_SYSTEM_LIBRARY)
|
||||
set (Boost_SYSTEM_LIBRARY boost_system_internal)
|
||||
set (Boost_PROGRAM_OPTIONS_LIBRARY boost_program_options_internal)
|
||||
set (Boost_FILESYSTEM_LIBRARY boost_filesystem_internal ${Boost_SYSTEM_LIBRARY})
|
||||
set (Boost_REGEX_LIBRARY boost_regex_internal)
|
||||
|
||||
set (Boost_INCLUDE_DIRS)
|
||||
|
||||
set (BOOST_ROOT "${ClickHouse_SOURCE_DIR}/contrib/boost")
|
||||
|
||||
# For boost from github:
|
||||
file (GLOB Boost_INCLUDE_DIRS_ "${ClickHouse_SOURCE_DIR}/contrib/boost/libs/*/include")
|
||||
list (APPEND Boost_INCLUDE_DIRS ${Boost_INCLUDE_DIRS_})
|
||||
@ -44,4 +47,4 @@ if (NOT Boost_SYSTEM_LIBRARY)
|
||||
|
||||
endif ()
|
||||
|
||||
message (STATUS "Using Boost: ${Boost_INCLUDE_DIRS} : ${Boost_PROGRAM_OPTIONS_LIBRARY},${Boost_SYSTEM_LIBRARY},${Boost_FILESYSTEM_LIBRARY}")
|
||||
message (STATUS "Using Boost: ${Boost_INCLUDE_DIRS} : ${Boost_PROGRAM_OPTIONS_LIBRARY},${Boost_SYSTEM_LIBRARY},${Boost_FILESYSTEM_LIBRARY},${Boost_REGEX_LIBRARY}")
|
||||
|
36
cmake/find_brotli.cmake
Normal file
36
cmake/find_brotli.cmake
Normal file
@ -0,0 +1,36 @@
|
||||
option (ENABLE_BROTLI "Enable brotli" ON)
|
||||
|
||||
if (ENABLE_BROTLI)
|
||||
|
||||
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${NOT_UNBUNDLED})
|
||||
|
||||
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
|
||||
if (USE_INTERNAL_BROTLI_LIBRARY)
|
||||
message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init --recursive")
|
||||
set (USE_INTERNAL_BROTLI_LIBRARY 0)
|
||||
endif ()
|
||||
set (MISSING_INTERNAL_BROTLI_LIBRARY 1)
|
||||
endif ()
|
||||
|
||||
if(NOT USE_INTERNAL_BROTLI_LIBRARY)
|
||||
find_library(BROTLI_LIBRARY_COMMON brotlicommon)
|
||||
find_library(BROTLI_LIBRARY_DEC brotlidec)
|
||||
find_library(BROTLI_LIBRARY_ENC brotlienc)
|
||||
find_path(BROTLI_INCLUDE_DIR NAMES brotli/decode.h brotli/encode.h brotli/port.h brotli/types.h PATHS ${BROTLI_INCLUDE_PATHS})
|
||||
if(BROTLI_LIBRARY_DEC AND BROTLI_LIBRARY_ENC AND BROTLI_LIBRARY_COMMON)
|
||||
set(BROTLI_LIBRARY ${BROTLI_LIBRARY_DEC} ${BROTLI_LIBRARY_ENC} ${BROTLI_LIBRARY_COMMON})
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (BROTLI_LIBRARY AND BROTLI_INCLUDE_DIR)
|
||||
set (USE_BROTLI 1)
|
||||
elseif (NOT MISSING_INTERNAL_BROTLI_LIBRARY)
|
||||
set (BROTLI_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include)
|
||||
set (USE_INTERNAL_BROTLI_LIBRARY 1)
|
||||
set (BROTLI_LIBRARY brotli)
|
||||
set (USE_BROTLI 1)
|
||||
endif ()
|
||||
|
||||
endif()
|
||||
|
||||
message (STATUS "Using brotli=${USE_BROTLI}: ${BROTLI_INCLUDE_DIR} : ${BROTLI_LIBRARY}")
|
@ -19,11 +19,13 @@ endif()
if (NOT GTEST_SRC_DIR AND NOT GTEST_INCLUDE_DIRS AND NOT MISSING_INTERNAL_GTEST_LIBRARY)
set (USE_INTERNAL_GTEST_LIBRARY 1)
set (GTEST_MAIN_LIBRARIES gtest_main)
set (GTEST_LIBRARIES gtest)
set (GTEST_BOTH_LIBRARIES ${GTEST_MAIN_LIBRARIES} ${GTEST_LIBRARIES})
set (GTEST_INCLUDE_DIRS ${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest)
endif ()

if((GTEST_INCLUDE_DIRS AND GTEST_MAIN_LIBRARIES) OR GTEST_SRC_DIR)
if((GTEST_INCLUDE_DIRS AND GTEST_BOTH_LIBRARIES) OR GTEST_SRC_DIR)
set(USE_GTEST 1)
endif()

message (STATUS "Using gtest=${USE_GTEST}: ${GTEST_INCLUDE_DIRS} : ${GTEST_MAIN_LIBRARIES} : ${GTEST_SRC_DIR}")
message (STATUS "Using gtest=${USE_GTEST}: ${GTEST_INCLUDE_DIRS} : ${GTEST_BOTH_LIBRARIES} : ${GTEST_SRC_DIR}")

@ -1,4 +1,4 @@
if (NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE)
if (NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE AND USE_PROTOBUF)
option (ENABLE_HDFS "Enable HDFS" ${NOT_UNBUNDLED})
endif ()

@ -9,8 +9,9 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
endif ()

if (NOT USE_INTERNAL_LIBXML2_LIBRARY)
find_library (LIBXML2_LIBRARY libxml2)
find_path (LIBXML2_INCLUDE_DIR NAMES libxml.h PATHS ${LIBXML2_INCLUDE_PATHS})
find_package (LibXml2)
#find_library (LIBXML2_LIBRARY libxml2)
#find_path (LIBXML2_INCLUDE_DIR NAMES libxml.h PATHS ${LIBXML2_INCLUDE_PATHS})
endif ()

if (LIBXML2_LIBRARY AND LIBXML2_INCLUDE_DIR)

@ -22,6 +22,9 @@ if (ENABLE_EMBEDDED_COMPILER)
if (NOT LLVM_FOUND)
find_package (LLVM 6 CONFIG PATHS ${LLVM_PATHS})
endif ()
if (NOT LLVM_FOUND)
find_package (LLVM 8 CONFIG PATHS ${LLVM_PATHS})
endif ()
if (NOT LLVM_FOUND)
find_package (LLVM 5 CONFIG PATHS ${LLVM_PATHS})
endif ()

@ -1,8 +1,11 @@
|
||||
option (USE_INTERNAL_LZ4_LIBRARY "Set to FALSE to use system lz4 library instead of bundled" ${NOT_UNBUNDLED})
|
||||
|
||||
if (USE_INTERNAL_LZ4_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/lz4/lib/lz4.h")
|
||||
message (WARNING "submodule contrib/lz4 is missing. to fix try run: \n git submodule update --init --recursive")
|
||||
set (USE_INTERNAL_LZ4_LIBRARY 0)
|
||||
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/lz4/lib/lz4.h")
|
||||
if (USE_INTERNAL_LZ4_LIBRARY)
|
||||
message (WARNING "submodule contrib/lz4 is missing. to fix try run: \n git submodule update --init --recursive")
|
||||
set (USE_INTERNAL_LZ4_LIBRARY 0)
|
||||
endif ()
|
||||
set (MISSING_INTERNAL_LZ4_LIBRARY 1)
|
||||
endif ()
|
||||
|
||||
if (NOT USE_INTERNAL_LZ4_LIBRARY)
|
||||
@ -11,7 +14,7 @@ if (NOT USE_INTERNAL_LZ4_LIBRARY)
|
||||
endif ()
|
||||
|
||||
if (LZ4_LIBRARY AND LZ4_INCLUDE_DIR)
|
||||
else ()
|
||||
elseif (NOT MISSING_INTERNAL_LZ4_LIBRARY)
|
||||
set (LZ4_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/lz4/lib)
|
||||
set (USE_INTERNAL_LZ4_LIBRARY 1)
|
||||
set (LZ4_LIBRARY lz4)
|
||||
|
74
cmake/find_parquet.cmake
Normal file
74
cmake/find_parquet.cmake
Normal file
@ -0,0 +1,74 @@
|
||||
option (ENABLE_PARQUET "Enable parquet" ON)
|
||||
|
||||
if (ENABLE_PARQUET)
|
||||
|
||||
if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory
|
||||
option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ${NOT_UNBUNDLED})
|
||||
endif()
|
||||
|
||||
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/CMakeLists.txt")
|
||||
if(USE_INTERNAL_PARQUET_LIBRARY)
|
||||
message(WARNING "submodule contrib/arrow (required for Parquet) is missing. to fix try run: \n git submodule update --init --recursive")
|
||||
endif()
|
||||
set(USE_INTERNAL_PARQUET_LIBRARY 0)
|
||||
set(MISSING_INTERNAL_PARQUET_LIBRARY 1)
|
||||
endif()
|
||||
|
||||
if(NOT USE_INTERNAL_PARQUET_LIBRARY)
|
||||
find_package(Arrow)
|
||||
find_package(Parquet)
|
||||
endif()
|
||||
|
||||
if(ARROW_INCLUDE_DIR AND PARQUET_INCLUDE_DIR)
|
||||
elseif(NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD)
|
||||
include(cmake/find_snappy.cmake)
|
||||
set(CAN_USE_INTERNAL_PARQUET_LIBRARY 1)
|
||||
include(CheckCXXSourceCompiles)
|
||||
if(NOT USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY)
|
||||
set(CMAKE_REQUIRED_LIBRARIES ${DOUBLE_CONVERSION_LIBRARIES})
|
||||
set(CMAKE_REQUIRED_INCLUDES ${DOUBLE_CONVERSION_INCLUDE_DIR})
|
||||
check_cxx_source_compiles("
|
||||
#include <double-conversion/double-conversion.h>
|
||||
int main() { static const int flags_ = double_conversion::StringToDoubleConverter::ALLOW_CASE_INSENSIBILITY; return 0;}
|
||||
" HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY)
|
||||
|
||||
if(NOT HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY) # HAVE_STD_RANDOM_SHUFFLE
|
||||
message(STATUS "Disabling internal parquet library because arrow is broken (can't use old double_conversion)")
|
||||
set(CAN_USE_INTERNAL_PARQUET_LIBRARY 0)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT CAN_USE_INTERNAL_PARQUET_LIBRARY)
|
||||
set(USE_INTERNAL_PARQUET_LIBRARY 0)
|
||||
else()
|
||||
set(USE_INTERNAL_PARQUET_LIBRARY 1)
|
||||
|
||||
if(USE_INTERNAL_PARQUET_LIBRARY_NATIVE_CMAKE)
|
||||
set(ARROW_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src")
|
||||
set(PARQUET_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" ${ClickHouse_BINARY_DIR}/contrib/arrow/cpp/src)
|
||||
endif()
|
||||
|
||||
if(${USE_STATIC_LIBRARIES})
|
||||
set(ARROW_LIBRARY arrow_static)
|
||||
set(PARQUET_LIBRARY parquet_static)
|
||||
set(THRIFT_LIBRARY thrift_static)
|
||||
else()
|
||||
set(ARROW_LIBRARY arrow_shared)
|
||||
set(PARQUET_LIBRARY parquet_shared)
|
||||
if(USE_INTERNAL_PARQUET_LIBRARY_NATIVE_CMAKE)
|
||||
list(APPEND PARQUET_LIBRARY ${Boost_REGEX_LIBRARY})
|
||||
endif()
|
||||
set(THRIFT_LIBRARY thrift)
|
||||
endif()
|
||||
|
||||
set(USE_PARQUET 1)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
if(USE_PARQUET)
|
||||
message(STATUS "Using Parquet: ${ARROW_LIBRARY}:${ARROW_INCLUDE_DIR} ; ${PARQUET_LIBRARY}:${PARQUET_INCLUDE_DIR} ; ${THRIFT_LIBRARY}")
|
||||
else()
|
||||
message(STATUS "Building without Parquet support")
|
||||
endif()
|
@ -1,10 +1,8 @@
|
||||
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled" ${NOT_UNBUNDLED})
|
||||
option (ENABLE_PROTOBUF "Enable protobuf" ON)
|
||||
|
||||
if(OS_FREEBSD AND SANITIZE STREQUAL "address")
|
||||
# ../contrib/protobuf/src/google/protobuf/arena_impl.h:45:10: fatal error: 'sanitizer/asan_interface.h' file not found
|
||||
set(MISSING_INTERNAL_PROTOBUF_LIBRARY 1)
|
||||
set(USE_INTERNAL_PROTOBUF_LIBRARY 0)
|
||||
endif()
|
||||
if (ENABLE_PROTOBUF)
|
||||
|
||||
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled" ${NOT_UNBUNDLED})
|
||||
|
||||
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
|
||||
if(USE_INTERNAL_PROTOBUF_LIBRARY)
|
||||
@ -94,4 +92,16 @@ elseif(NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
|
||||
endfunction()
|
||||
endif()
|
||||
|
||||
if(OS_FREEBSD AND SANITIZE STREQUAL "address")
|
||||
# ../contrib/protobuf/src/google/protobuf/arena_impl.h:45:10: fatal error: 'sanitizer/asan_interface.h' file not found
|
||||
# #include <sanitizer/asan_interface.h>
|
||||
if(LLVM_INCLUDE_DIRS)
|
||||
set(Protobuf_INCLUDE_DIR ${Protobuf_INCLUDE_DIR} ${LLVM_INCLUDE_DIRS})
|
||||
else()
|
||||
set(USE_PROTOBUF 0)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY}")
|
||||
|
@ -1,4 +1,5 @@
if (NOT ARCH_ARM AND NOT ARCH_32 AND NOT APPLE)
# Freebsd: contrib/cppkafka/include/cppkafka/detail/endianness.h:53:23: error: 'betoh16' was not declared in this scope
if (NOT ARCH_ARM AND NOT ARCH_32 AND NOT APPLE AND NOT OS_FREEBSD)
option (ENABLE_RDKAFKA "Enable kafka" ON)
endif ()

27
cmake/find_snappy.cmake
Normal file
27
cmake/find_snappy.cmake
Normal file
@ -0,0 +1,27 @@
|
||||
option(USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ${NOT_UNBUNDLED})
|
||||
|
||||
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/snappy/snappy.h")
|
||||
if(USE_INTERNAL_SNAPPY_LIBRARY)
|
||||
message(WARNING "submodule contrib/snappy is missing. to fix try run: \n git submodule update --init --recursive")
|
||||
set(USE_INTERNAL_SNAPPY_LIBRARY 0)
|
||||
endif()
|
||||
set(MISSING_INTERNAL_SNAPPY_LIBRARY 1)
|
||||
endif()
|
||||
|
||||
if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
|
||||
find_library(SNAPPY_LIBRARY snappy)
|
||||
find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS ${SNAPPY_INCLUDE_PATHS})
|
||||
endif()
|
||||
|
||||
if(SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
|
||||
elseif(NOT MISSING_INTERNAL_SNAPPY_LIBRARY)
|
||||
set(SNAPPY_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/snappy)
|
||||
set(USE_INTERNAL_SNAPPY_LIBRARY 1)
|
||||
set(SNAPPY_LIBRARY snappy)
|
||||
endif()
|
||||
|
||||
if(SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
|
||||
set(USE_SNAPPY 1)
|
||||
endif()
|
||||
|
||||
message(STATUS "Using snappy=${USE_SNAPPY}: ${SNAPPY_INCLUDE_DIR} : ${SNAPPY_LIBRARY}")
|
@ -1,10 +1,22 @@
|
||||
if (LZ4_INCLUDE_DIR)
|
||||
if (NOT EXISTS "${LZ4_INCLUDE_DIR}/xxhash.h")
|
||||
message (WARNING "LZ4 library does not have XXHash. Support for XXHash will be disabled.")
|
||||
set (USE_XXHASH 0)
|
||||
else ()
|
||||
set (USE_XXHASH 1)
|
||||
endif ()
|
||||
option (USE_INTERNAL_XXHASH_LIBRARY "Set to FALSE to use system xxHash library instead of bundled" ${NOT_UNBUNDLED})
|
||||
|
||||
if (USE_INTERNAL_XXHASH_LIBRARY AND NOT USE_INTERNAL_LZ4_LIBRARY)
|
||||
message (WARNING "can not use internal xxhash without internal lz4")
|
||||
set (USE_INTERNAL_XXHASH_LIBRARY 0)
|
||||
endif ()
|
||||
|
||||
message (STATUS "Using xxhash=${USE_XXHASH}")
|
||||
if (USE_INTERNAL_XXHASH_LIBRARY)
|
||||
set (XXHASH_LIBRARY lz4)
|
||||
set (XXHASH_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/lz4/lib)
|
||||
else ()
|
||||
find_library (XXHASH_LIBRARY xxhash)
|
||||
find_path (XXHASH_INCLUDE_DIR NAMES xxhash.h PATHS ${XXHASH_INCLUDE_PATHS})
|
||||
endif ()
|
||||
|
||||
if (XXHASH_LIBRARY AND XXHASH_INCLUDE_DIR)
|
||||
set (USE_XXHASH 1)
|
||||
else ()
|
||||
set (USE_XXHASH 0)
|
||||
endif ()
|
||||
|
||||
message (STATUS "Using xxhash=${USE_XXHASH}: ${XXHASH_INCLUDE_DIR} : ${XXHASH_LIBRARY}")
|
||||
|
@ -13,6 +13,9 @@ list(APPEND dirs ${dirs1})
get_property (dirs1 TARGET cityhash PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

get_property (dirs1 TARGET roaring PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

if (USE_INTERNAL_BOOST_LIBRARY)
get_property (dirs1 TARGET ${Boost_PROGRAM_OPTIONS_LIBRARY} PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

@ -11,38 +11,13 @@ if (OS_LINUX AND COMPILER_CLANG)
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS}")
|
||||
|
||||
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++ (only make sense on Linux with Clang)" ${HAVE_LIBCXX})
|
||||
set (LIBCXX_PATH "" CACHE STRING "Use custom path for libc++. It should be used for MSan.")
|
||||
|
||||
if (USE_LIBCXX)
|
||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++") # Ok for clang6, for older can cause 'not used option' warning
|
||||
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_LIBCPP_DEBUG=0") # More checks in debug build.
|
||||
if (MAKE_STATIC_LIBRARIES)
|
||||
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --print-file-name=libclang_rt.builtins-${CMAKE_SYSTEM_PROCESSOR}.a OUTPUT_VARIABLE BUILTINS_LIB_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
link_libraries (-nodefaultlibs -Wl,-Bstatic -stdlib=libc++ c++ c++abi gcc_eh ${BUILTINS_LIB_PATH} rt -Wl,-Bdynamic dl pthread m c)
|
||||
else ()
|
||||
link_libraries (-stdlib=libc++ c++ c++abi)
|
||||
endif ()
|
||||
|
||||
if (LIBCXX_PATH)
|
||||
# include_directories (SYSTEM BEFORE "${LIBCXX_PATH}/include" "${LIBCXX_PATH}/include/c++/v1")
|
||||
link_directories ("${LIBCXX_PATH}/lib")
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (USE_LIBCXX)
|
||||
set (STATIC_STDLIB_FLAGS "")
|
||||
else ()
|
||||
set (STATIC_STDLIB_FLAGS "-static-libgcc -static-libstdc++")
|
||||
endif ()
|
||||
|
||||
if (MAKE_STATIC_LIBRARIES AND NOT APPLE AND NOT (COMPILER_CLANG AND OS_FREEBSD))
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${STATIC_STDLIB_FLAGS}")
|
||||
|
||||
# Along with executables, we also build example of shared library for "library dictionary source"; and it also should be self-contained.
|
||||
set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${STATIC_STDLIB_FLAGS}")
|
||||
endif ()
|
||||
|
||||
if (USE_STATIC_LIBRARIES AND HAVE_NO_PIE)
|
||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG_NO_PIE}")
|
||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAG_NO_PIE}")
|
||||
|
73
contrib/CMakeLists.txt
vendored
73
contrib/CMakeLists.txt
vendored
@ -46,6 +46,7 @@ if (USE_INTERNAL_METROHASH_LIBRARY)
|
||||
endif ()
|
||||
|
||||
add_subdirectory (murmurhash)
|
||||
add_subdirectory (croaring)
|
||||
|
||||
if (USE_INTERNAL_BTRIE_LIBRARY)
|
||||
add_subdirectory (libbtrie)
|
||||
@ -150,6 +151,66 @@ if (USE_INTERNAL_CAPNP_LIBRARY)
|
||||
target_include_directories(${CAPNP_LIBRARY} PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/capnproto/c++/src>)
|
||||
endif ()
|
||||
|
||||
if (USE_INTERNAL_PARQUET_LIBRARY)
|
||||
if (USE_INTERNAL_PARQUET_LIBRARY_NATIVE_CMAKE)
|
||||
# We don't use Arrow's CMake files because they use too many dependencies and download some libraries at compile time
|
||||
# But this mode can be used for updating auto-generated parquet files:
|
||||
# cmake -DUSE_INTERNAL_PARQUET_LIBRARY_NATIVE_CMAKE=1 -DUSE_STATIC_LIBRARIES=0
|
||||
# copy {BUILD_DIR}/contrib/arrow/cpp/src/parquet/*.cpp,*.h -> /contrib/arrow-cmake/cpp/src/parquet/
|
||||
|
||||
# Also useful parquet reader:
|
||||
# cd contrib/arrow/cpp/build && mkdir -p build && cmake .. -DPARQUET_BUILD_EXECUTABLES=1 && make -j8
|
||||
# contrib/arrow/cpp/build/debug/parquet-reader some_file.parquet
|
||||
|
||||
set (ARROW_COMPUTE ON CACHE INTERNAL "")
|
||||
set (ARROW_PARQUET ON CACHE INTERNAL "")
|
||||
set (ARROW_VERBOSE_THIRDPARTY_BUILD ON CACHE INTERNAL "")
|
||||
set (ARROW_BUILD_SHARED 1 CACHE INTERNAL "")
|
||||
set (ARROW_BOOST_HEADER_ONLY ON CACHE INTERNAL "")
|
||||
#set (BOOST_INCLUDEDIR Boost_INCLUDE_DIRS)
|
||||
set (Boost_FOUND 1 CACHE INTERNAL "")
|
||||
#set (ZLIB_HOME ${ZLIB_INCLUDE_DIR})
|
||||
#set (ZLIB_FOUND 1)
|
||||
if (MAKE_STATIC_LIBRARIES)
|
||||
set (PARQUET_ARROW_LINKAGE "static" CACHE INTERNAL "")
|
||||
set (ARROW_TEST_LINKAGE "static" CACHE INTERNAL "")
|
||||
set (ARROW_BUILD_STATIC ${MAKE_STATIC_LIBRARIES} CACHE INTERNAL "")
|
||||
else()
|
||||
set (PARQUET_ARROW_LINKAGE "shared" CACHE INTERNAL "")
|
||||
set (ARROW_TEST_LINKAGE "shared" CACHE INTERNAL "")
|
||||
endif()
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "RELWITHDEBINFO")
|
||||
set(_save_build_type ${CMAKE_BUILD_TYPE})
|
||||
set(CMAKE_BUILD_TYPE RELEASE)
|
||||
endif()
|
||||
|
||||
# Because Arrow uses CMAKE_SOURCE_DIR as a project path
|
||||
# Hopefully will be fixed in https://github.com/apache/arrow/pull/2676
|
||||
set (CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/cmake_modules")
|
||||
add_subdirectory (arrow/cpp)
|
||||
|
||||
if(_save_build_type)
|
||||
set(CMAKE_BUILD_TYPE ${_save_build_type})
|
||||
endif()
|
||||
|
||||
else()
|
||||
|
||||
if(USE_INTERNAL_SNAPPY_LIBRARY)
|
||||
set(SNAPPY_BUILD_TESTS 0 CACHE INTERNAL "")
|
||||
if (NOT MAKE_STATIC_LIBRARIES)
|
||||
set(BUILD_SHARED_LIBS 1) # TODO: set at root dir
|
||||
endif()
|
||||
add_subdirectory(snappy)
|
||||
if(SANITIZE STREQUAL "undefined")
|
||||
target_compile_options(${SNAPPY_LIBRARY} PRIVATE -fno-sanitize=undefined)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
add_subdirectory(arrow-cmake)
|
||||
endif()
|
||||
endif()
|
||||

if (USE_INTERNAL_POCO_LIBRARY)
set (POCO_VERBOSE_MESSAGES 0 CACHE INTERNAL "")
set (save_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
@@ -217,10 +278,18 @@ if (USE_INTERNAL_LIBXML2_LIBRARY)
add_subdirectory(libxml2-cmake)
endif ()

if (USE_INTERNAL_BROTLI_LIBRARY)
add_subdirectory(brotli-cmake)
endif ()

if (USE_INTERNAL_PROTOBUF_LIBRARY)
set(protobuf_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
set(protobuf_BUILD_SHARED_LIBS OFF CACHE INTERNAL "" FORCE)
if (MAKE_STATIC_LIBRARIES)
set(protobuf_BUILD_SHARED_LIBS OFF CACHE INTERNAL "" FORCE)
else ()
set(protobuf_BUILD_SHARED_LIBS ON CACHE INTERNAL "" FORCE)
endif ()
set(protobuf_WITH_ZLIB 0 CACHE INTERNAL "" FORCE) # actually will use zlib, but skip find
set(protobuf_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
add_subdirectory(protobuf/cmake)
endif ()

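The Parquet/Arrow section above pre-seeds the vendored Arrow build by forcing its cache options (`ARROW_PARQUET`, linkage, `Boost_FOUND`, …) before calling `add_subdirectory`, so Arrow's own `option()`/`set(... CACHE ...)` calls cannot override them. A minimal sketch of that pattern, with hypothetical `somelib`/`SOMELIB_*` names that are not part of the commit:

```cmake
# Pre-seed a vendored project's cache options before add_subdirectory():
# an existing cache entry is not overwritten by the subproject's option()
# or set(... CACHE ...) calls unless they use FORCE.
cmake_minimum_required(VERSION 3.5)
project(preseed_example)

set(SOMELIB_BUILD_TESTS OFF CACHE INTERNAL "")                    # hypothetical option
set(SOMELIB_BUILD_SHARED ${BUILD_SHARED_LIBS} CACHE INTERNAL "")  # hypothetical option

add_subdirectory(contrib/somelib)   # the vendored project sees the values above
```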
1
contrib/arrow
vendored
Submodule
@@ -0,0 +1 @@
Subproject commit 87ac6fddaf21d0b4ee8b8090533ff293db0da1b4
216
contrib/arrow-cmake/CMakeLists.txt
Normal file
@@ -0,0 +1,216 @@
# === thrift
|
||||
|
||||
set(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/thrift/lib/cpp)
|
||||
# contrib/thrift/lib/cpp/CMakeLists.txt
|
||||
set(thriftcpp_SOURCES
|
||||
${LIBRARY_DIR}/src/thrift/TApplicationException.cpp
|
||||
${LIBRARY_DIR}/src/thrift/TOutput.cpp
|
||||
${LIBRARY_DIR}/src/thrift/async/TAsyncChannel.cpp
|
||||
${LIBRARY_DIR}/src/thrift/async/TAsyncProtocolProcessor.cpp
|
||||
${LIBRARY_DIR}/src/thrift/async/TConcurrentClientSyncInfo.h
|
||||
${LIBRARY_DIR}/src/thrift/async/TConcurrentClientSyncInfo.cpp
|
||||
${LIBRARY_DIR}/src/thrift/concurrency/ThreadManager.cpp
|
||||
${LIBRARY_DIR}/src/thrift/concurrency/TimerManager.cpp
|
||||
${LIBRARY_DIR}/src/thrift/concurrency/Util.cpp
|
||||
${LIBRARY_DIR}/src/thrift/processor/PeekProcessor.cpp
|
||||
${LIBRARY_DIR}/src/thrift/protocol/TBase64Utils.cpp
|
||||
${LIBRARY_DIR}/src/thrift/protocol/TDebugProtocol.cpp
|
||||
${LIBRARY_DIR}/src/thrift/protocol/TJSONProtocol.cpp
|
||||
${LIBRARY_DIR}/src/thrift/protocol/TMultiplexedProtocol.cpp
|
||||
${LIBRARY_DIR}/src/thrift/protocol/TProtocol.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TTransportException.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TFDTransport.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TSimpleFileTransport.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/THttpTransport.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/THttpClient.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/THttpServer.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TSocket.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TSocketPool.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TServerSocket.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TTransportUtils.cpp
|
||||
${LIBRARY_DIR}/src/thrift/transport/TBufferTransports.cpp
|
||||
${LIBRARY_DIR}/src/thrift/server/TConnectedClient.cpp
|
||||
${LIBRARY_DIR}/src/thrift/server/TServerFramework.cpp
|
||||
${LIBRARY_DIR}/src/thrift/server/TSimpleServer.cpp
|
||||
${LIBRARY_DIR}/src/thrift/server/TThreadPoolServer.cpp
|
||||
${LIBRARY_DIR}/src/thrift/server/TThreadedServer.cpp
|
||||
)
|
||||
set( thriftcpp_threads_SOURCES
|
||||
${LIBRARY_DIR}/src/thrift/concurrency/ThreadFactory.cpp
|
||||
${LIBRARY_DIR}/src/thrift/concurrency/Thread.cpp
|
||||
${LIBRARY_DIR}/src/thrift/concurrency/Monitor.cpp
|
||||
${LIBRARY_DIR}/src/thrift/concurrency/Mutex.cpp
|
||||
)
|
||||
add_library(${THRIFT_LIBRARY} ${LINK_MODE} ${thriftcpp_SOURCES} ${thriftcpp_threads_SOURCES})
|
||||
set_target_properties(${THRIFT_LIBRARY} PROPERTIES CXX_STANDARD 14) # REMOVE after https://github.com/apache/thrift/pull/1641
|
||||
target_include_directories(${THRIFT_LIBRARY} SYSTEM PUBLIC ${ClickHouse_SOURCE_DIR}/contrib/thrift/lib/cpp/src PRIVATE ${Boost_INCLUDE_DIRS})
|
||||
|
||||
|
||||
|
||||
# === arrow
|
||||
|
||||
set(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src/arrow)
|
||||
# arrow/cpp/src/arrow/CMakeLists.txt
|
||||
set(ARROW_SRCS
|
||||
${LIBRARY_DIR}/array.cc
|
||||
|
||||
${LIBRARY_DIR}/builder.cc
|
||||
${LIBRARY_DIR}/array/builder_adaptive.cc
|
||||
${LIBRARY_DIR}/array/builder_base.cc
|
||||
${LIBRARY_DIR}/array/builder_binary.cc
|
||||
${LIBRARY_DIR}/array/builder_decimal.cc
|
||||
${LIBRARY_DIR}/array/builder_dict.cc
|
||||
${LIBRARY_DIR}/array/builder_nested.cc
|
||||
${LIBRARY_DIR}/array/builder_primitive.cc
|
||||
|
||||
${LIBRARY_DIR}/buffer.cc
|
||||
${LIBRARY_DIR}/compare.cc
|
||||
${LIBRARY_DIR}/memory_pool.cc
|
||||
${LIBRARY_DIR}/pretty_print.cc
|
||||
${LIBRARY_DIR}/record_batch.cc
|
||||
${LIBRARY_DIR}/status.cc
|
||||
${LIBRARY_DIR}/table.cc
|
||||
${LIBRARY_DIR}/table_builder.cc
|
||||
${LIBRARY_DIR}/tensor.cc
|
||||
${LIBRARY_DIR}/sparse_tensor.cc
|
||||
${LIBRARY_DIR}/type.cc
|
||||
${LIBRARY_DIR}/visitor.cc
|
||||
|
||||
${LIBRARY_DIR}/csv/converter.cc
|
||||
${LIBRARY_DIR}/csv/chunker.cc
|
||||
${LIBRARY_DIR}/csv/column-builder.cc
|
||||
${LIBRARY_DIR}/csv/options.cc
|
||||
${LIBRARY_DIR}/csv/parser.cc
|
||||
${LIBRARY_DIR}/csv/reader.cc
|
||||
|
||||
${LIBRARY_DIR}/io/buffered.cc
|
||||
${LIBRARY_DIR}/io/compressed.cc
|
||||
${LIBRARY_DIR}/io/file.cc
|
||||
${LIBRARY_DIR}/io/interfaces.cc
|
||||
${LIBRARY_DIR}/io/memory.cc
|
||||
${LIBRARY_DIR}/io/readahead.cc
|
||||
|
||||
${LIBRARY_DIR}/util/bit-util.cc
|
||||
${LIBRARY_DIR}/util/compression.cc
|
||||
${LIBRARY_DIR}/util/cpu-info.cc
|
||||
${LIBRARY_DIR}/util/decimal.cc
|
||||
${LIBRARY_DIR}/util/int-util.cc
|
||||
${LIBRARY_DIR}/util/io-util.cc
|
||||
${LIBRARY_DIR}/util/logging.cc
|
||||
${LIBRARY_DIR}/util/key_value_metadata.cc
|
||||
${LIBRARY_DIR}/util/task-group.cc
|
||||
${LIBRARY_DIR}/util/thread-pool.cc
|
||||
${LIBRARY_DIR}/util/trie.cc
|
||||
${LIBRARY_DIR}/util/utf8.cc
|
||||
)
|
||||
|
||||
set(ARROW_SRCS ${ARROW_SRCS}
|
||||
${LIBRARY_DIR}/compute/context.cc
|
||||
${LIBRARY_DIR}/compute/kernels/boolean.cc
|
||||
${LIBRARY_DIR}/compute/kernels/cast.cc
|
||||
${LIBRARY_DIR}/compute/kernels/hash.cc
|
||||
${LIBRARY_DIR}/compute/kernels/util-internal.cc
|
||||
)
|
||||
|
||||
if (LZ4_INCLUDE_DIR AND LZ4_LIBRARY)
|
||||
set(ARROW_WITH_LZ4 1)
|
||||
endif()
|
||||
|
||||
if(SNAPPY_INCLUDE_DIR AND SNAPPY_LIBRARY)
|
||||
set(ARROW_WITH_SNAPPY 1)
|
||||
endif()
|
||||
|
||||
if(ZLIB_INCLUDE_DIR AND ZLIB_LIBRARIES)
|
||||
set(ARROW_WITH_ZLIB 1)
|
||||
endif()
|
||||
|
||||
if (ZSTD_INCLUDE_DIR AND ZSTD_LIBRARY)
|
||||
set(ARROW_WITH_ZSTD 1)
|
||||
endif()
|
||||
|
||||
if (ARROW_WITH_LZ4)
|
||||
add_definitions(-DARROW_WITH_LZ4)
|
||||
SET(ARROW_SRCS ${LIBRARY_DIR}/util/compression_lz4.cc ${ARROW_SRCS})
|
||||
endif()
|
||||
|
||||
if (ARROW_WITH_SNAPPY)
|
||||
add_definitions(-DARROW_WITH_SNAPPY)
|
||||
SET(ARROW_SRCS ${LIBRARY_DIR}/util/compression_snappy.cc ${ARROW_SRCS})
|
||||
endif()
|
||||
|
||||
if (ARROW_WITH_ZLIB)
|
||||
add_definitions(-DARROW_WITH_ZLIB)
|
||||
SET(ARROW_SRCS ${LIBRARY_DIR}/util/compression_zlib.cc ${ARROW_SRCS})
|
||||
endif()
|
||||
|
||||
if (ARROW_WITH_ZSTD)
|
||||
add_definitions(-DARROW_WITH_ZSTD)
|
||||
SET(ARROW_SRCS ${LIBRARY_DIR}/util/compression_zstd.cc ${ARROW_SRCS})
|
||||
endif()
|
||||
|
||||
|
||||
add_library(${ARROW_LIBRARY} ${LINK_MODE} ${ARROW_SRCS})
|
||||
target_include_directories(${ARROW_LIBRARY} SYSTEM PUBLIC ${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/cpp/src ${Boost_INCLUDE_DIRS})
|
||||
target_link_libraries(${ARROW_LIBRARY} PRIVATE ${DOUBLE_CONVERSION_LIBRARIES} Threads::Threads)
|
||||
if (ARROW_WITH_LZ4)
|
||||
target_link_libraries(${ARROW_LIBRARY} PRIVATE ${LZ4_LIBRARY})
|
||||
endif()
|
||||
if (ARROW_WITH_SNAPPY)
|
||||
target_link_libraries(${ARROW_LIBRARY} PRIVATE ${SNAPPY_LIBRARY})
|
||||
endif()
|
||||
if (ARROW_WITH_ZLIB)
|
||||
target_link_libraries(${ARROW_LIBRARY} PRIVATE ${ZLIB_LIBRARIES})
|
||||
endif()
|
||||
if (ARROW_WITH_ZSTD)
|
||||
target_link_libraries(${ARROW_LIBRARY} PRIVATE ${ZSTD_LIBRARY})
|
||||
endif()
|
||||
|
||||
|
||||
# === parquet
|
||||
|
||||
set(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src/parquet)
|
||||
# arrow/cpp/src/parquet/CMakeLists.txt
|
||||
set(PARQUET_SRCS
|
||||
${LIBRARY_DIR}/arrow/reader.cc
|
||||
${LIBRARY_DIR}/arrow/record_reader.cc
|
||||
${LIBRARY_DIR}/arrow/schema.cc
|
||||
${LIBRARY_DIR}/arrow/writer.cc
|
||||
${LIBRARY_DIR}/bloom_filter.cc
|
||||
${LIBRARY_DIR}/column_reader.cc
|
||||
${LIBRARY_DIR}/column_scanner.cc
|
||||
${LIBRARY_DIR}/column_writer.cc
|
||||
${LIBRARY_DIR}/file_reader.cc
|
||||
${LIBRARY_DIR}/file_writer.cc
|
||||
${LIBRARY_DIR}/metadata.cc
|
||||
${LIBRARY_DIR}/murmur3.cc
|
||||
${LIBRARY_DIR}/printer.cc
|
||||
${LIBRARY_DIR}/schema.cc
|
||||
${LIBRARY_DIR}/statistics.cc
|
||||
${LIBRARY_DIR}/types.cc
|
||||
${LIBRARY_DIR}/util/comparison.cc
|
||||
${LIBRARY_DIR}/util/memory.cc
|
||||
)
|
||||
#list(TRANSFORM PARQUET_SRCS PREPEND ${LIBRARY_DIR}/) # cmake 3.12
|
||||
list(APPEND PARQUET_SRCS
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/cpp/src/parquet/parquet_constants.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/cpp/src/parquet/parquet_types.cpp
|
||||
)
|
||||
add_library(${PARQUET_LIBRARY} ${LINK_MODE} ${PARQUET_SRCS})
|
||||
target_include_directories(${PARQUET_LIBRARY} SYSTEM PUBLIC ${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src ${CMAKE_CURRENT_SOURCE_DIR}/cpp/src)
|
||||
include(${ClickHouse_SOURCE_DIR}/contrib/thrift/build/cmake/ConfigureChecks.cmake) # makes config.h
|
||||
target_link_libraries(${PARQUET_LIBRARY} PUBLIC ${ARROW_LIBRARY} PRIVATE ${THRIFT_LIBRARY} ${Boost_REGEX_LIBRARY})
|
||||
target_include_directories(${PARQUET_LIBRARY} PRIVATE ${Boost_INCLUDE_DIRS})
|
||||
|
||||
if(SANITIZE STREQUAL "undefined")
|
||||
target_compile_options(${PARQUET_LIBRARY} PRIVATE -fno-sanitize=undefined)
|
||||
target_compile_options(${ARROW_LIBRARY} PRIVATE -fno-sanitize=undefined)
|
||||
endif()
|
||||
|
||||
# === tools
|
||||
|
||||
set(TOOLS_DIR ${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/tools/parquet)
|
||||
set(PARQUET_TOOLS parquet-dump-schema parquet-reader parquet-scan)
|
||||
foreach(TOOL ${PARQUET_TOOLS})
|
||||
add_executable(${TOOL} ${TOOLS_DIR}/${TOOL}.cc)
|
||||
target_link_libraries(${TOOL} PRIVATE ${PARQUET_LIBRARY})
|
||||
endforeach()
|
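In the arrow-cmake build file above, each compression codec is optional: it is enabled only when both its include directory and library were found, a `-DARROW_WITH_*` definition is added, and the codec's wrapper source is prepended to `ARROW_SRCS`. A sketch of that pattern for a hypothetical codec `FOO` (names are illustrative only, not part of the commit):

```cmake
# Optional-dependency pattern used for LZ4/Snappy/zlib/zstd above,
# shown for a hypothetical codec "FOO".
if (FOO_INCLUDE_DIR AND FOO_LIBRARY)
    set(ARROW_WITH_FOO 1)
endif()

if (ARROW_WITH_FOO)
    add_definitions(-DARROW_WITH_FOO)                                     # lets sources #ifdef on the codec
    set(ARROW_SRCS ${LIBRARY_DIR}/util/compression_foo.cc ${ARROW_SRCS})  # hypothetical wrapper source
endif()

# ... and after add_library(${ARROW_LIBRARY} ...):
if (ARROW_WITH_FOO)
    target_link_libraries(${ARROW_LIBRARY} PRIVATE ${FOO_LIBRARY})
endif()
```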
1
contrib/arrow-cmake/build/cmake/config.h.in
Symbolic link
@@ -0,0 +1 @@
../../../thrift/build/cmake/config.h.in
17
contrib/arrow-cmake/cpp/src/parquet/parquet_constants.cpp
Normal file
17
contrib/arrow-cmake/cpp/src/parquet/parquet_constants.cpp
Normal file
@ -0,0 +1,17 @@
|
||||
/**
|
||||
* Autogenerated by Thrift Compiler (0.11.0)
|
||||
*
|
||||
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
* @generated
|
||||
*/
|
||||
#include "parquet_constants.h"
|
||||
|
||||
namespace parquet { namespace format {
|
||||
|
||||
const parquetConstants g_parquet_constants;
|
||||
|
||||
parquetConstants::parquetConstants() {
|
||||
}
|
||||
|
||||
}} // namespace
|
||||
|
24
contrib/arrow-cmake/cpp/src/parquet/parquet_constants.h
Normal file
24
contrib/arrow-cmake/cpp/src/parquet/parquet_constants.h
Normal file
@ -0,0 +1,24 @@
|
||||
/**
|
||||
* Autogenerated by Thrift Compiler (0.11.0)
|
||||
*
|
||||
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
* @generated
|
||||
*/
|
||||
#ifndef parquet_CONSTANTS_H
|
||||
#define parquet_CONSTANTS_H
|
||||
|
||||
#include "parquet_types.h"
|
||||
|
||||
namespace parquet { namespace format {
|
||||
|
||||
class parquetConstants {
|
||||
public:
|
||||
parquetConstants();
|
||||
|
||||
};
|
||||
|
||||
extern const parquetConstants g_parquet_constants;
|
||||
|
||||
}} // namespace
|
||||
|
||||
#endif
|
6501
contrib/arrow-cmake/cpp/src/parquet/parquet_types.cpp
Normal file
File diff suppressed because it is too large
2523
contrib/arrow-cmake/cpp/src/parquet/parquet_types.h
Normal file
File diff suppressed because it is too large
24
contrib/arrow-cmake/cpp/src/parquet/parquet_version.h
Normal file
24
contrib/arrow-cmake/cpp/src/parquet/parquet_version.h
Normal file
@ -0,0 +1,24 @@
|
||||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
#ifndef PARQUET_VERSION_H
|
||||
#define PARQUET_VERSION_H
|
||||
|
||||
// define the parquet created by version
|
||||
#define CREATED_BY_VERSION "parquet-cpp version 1.5.1-SNAPSHOT"
|
||||
|
||||
#endif // PARQUET_VERSION_H
|
11
contrib/arrow-cmake/cpp/src/thrift/stdcxx.h
Normal file
11
contrib/arrow-cmake/cpp/src/thrift/stdcxx.h
Normal file
@ -0,0 +1,11 @@
|
||||
/*
|
||||
|
||||
Temporary hack caused by 17355425 - THRIFT-4735: Remove Qt4 build support
|
||||
|
||||
Fixes
|
||||
../contrib/arrow-cmake/cpp/src/parquet/parquet_types.h:18:10: fatal error: thrift/stdcxx.h: No such file or directory
|
||||
#include <thrift/stdcxx.h>
|
||||
|
||||
Delete me.
|
||||
|
||||
*/
|
2
contrib/boost
vendored
@@ -1 +1 @@
Subproject commit 6883b40449f378019aec792f9983ce3afc7ff16e
Subproject commit 6a96e8b59f76148eb8ad54a9d15259f8ce84c606
@@ -10,49 +10,30 @@
# Important boost patch: 094c18b
#

set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/boost)
include(${ClickHouse_SOURCE_DIR}/cmake/dbms_glob_sources.cmake)

if (NOT MSVC)
set(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/boost)

if(NOT MSVC)
add_definitions(-Wno-unused-variable -Wno-deprecated-declarations)
endif ()
endif()

add_library(boost_program_options_internal ${LINK_MODE}
${LIBRARY_DIR}/libs/program_options/src/cmdline.cpp
${LIBRARY_DIR}/libs/program_options/src/config_file.cpp
${LIBRARY_DIR}/libs/program_options/src/convert.cpp
${LIBRARY_DIR}/libs/program_options/src/options_description.cpp
${LIBRARY_DIR}/libs/program_options/src/parsers.cpp
${LIBRARY_DIR}/libs/program_options/src/positional_options.cpp
${LIBRARY_DIR}/libs/program_options/src/split.cpp
${LIBRARY_DIR}/libs/program_options/src/utf8_codecvt_facet.cpp
${LIBRARY_DIR}/libs/program_options/src/value_semantic.cpp
${LIBRARY_DIR}/libs/program_options/src/variables_map.cpp
${LIBRARY_DIR}/libs/program_options/src/winmain.cpp)
macro(add_boost_lib lib_name)
add_headers_and_sources(boost_${lib_name} ${LIBRARY_DIR}/libs/${lib_name}/src)
add_library(boost_${lib_name}_internal ${LINK_MODE} ${boost_${lib_name}_sources})
target_include_directories(boost_${lib_name}_internal SYSTEM BEFORE PUBLIC ${Boost_INCLUDE_DIRS})
target_compile_definitions(boost_${lib_name}_internal PUBLIC BOOST_SYSTEM_NO_DEPRECATED)
endmacro()

add_library(boost_filesystem_internal ${LINK_MODE}
${LIBRARY_DIR}/libs/filesystem/src/codecvt_error_category.cpp
${LIBRARY_DIR}/libs/filesystem/src/operations.cpp
${LIBRARY_DIR}/libs/filesystem/src/path.cpp
${LIBRARY_DIR}/libs/filesystem/src/path_traits.cpp
${LIBRARY_DIR}/libs/filesystem/src/portability.cpp
${LIBRARY_DIR}/libs/filesystem/src/unique_path.cpp
${LIBRARY_DIR}/libs/filesystem/src/utf8_codecvt_facet.cpp
${LIBRARY_DIR}/libs/filesystem/src/windows_file_codecvt.cpp)
add_boost_lib(system)

add_library(boost_system_internal ${LINK_MODE}
${LIBRARY_DIR}/libs/system/src/error_code.cpp)
add_boost_lib(program_options)

add_library(boost_random_internal ${LINK_MODE}
${LIBRARY_DIR}/libs/random/src/random_device.cpp)
add_boost_lib(filesystem)
target_link_libraries(boost_filesystem_internal PRIVATE boost_system_internal)

target_link_libraries (boost_filesystem_internal PUBLIC boost_system_internal)
#add_boost_lib(random)

target_include_directories (boost_program_options_internal SYSTEM BEFORE PUBLIC ${Boost_INCLUDE_DIRS})
target_include_directories (boost_filesystem_internal SYSTEM BEFORE PUBLIC ${Boost_INCLUDE_DIRS})
target_include_directories (boost_system_internal SYSTEM BEFORE PUBLIC ${Boost_INCLUDE_DIRS})
target_include_directories (boost_random_internal SYSTEM BEFORE PUBLIC ${Boost_INCLUDE_DIRS})

target_compile_definitions (boost_program_options_internal PUBLIC BOOST_SYSTEM_NO_DEPRECATED)
target_compile_definitions (boost_filesystem_internal PUBLIC BOOST_SYSTEM_NO_DEPRECATED)
target_compile_definitions (boost_system_internal PUBLIC BOOST_SYSTEM_NO_DEPRECATED)
target_compile_definitions (boost_random_internal PUBLIC BOOST_SYSTEM_NO_DEPRECATED)
if (USE_INTERNAL_PARQUET_LIBRARY)
add_boost_lib(regex)
endif()
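The `add_boost_lib()` macro above replaces the hand-written per-library target blocks: it globs `libs/<name>/src`, creates a `boost_<name>_internal` target with the configured `LINK_MODE`, and applies the common include directories and the `BOOST_SYSTEM_NO_DEPRECATED` definition. A sketch of how one more Boost library could be wired up with it (`iostreams` is only an illustration; the commit itself registers `system`, `program_options`, `filesystem` and, for Parquet, `regex`):

```cmake
# Hypothetical additional library using the macro defined above.
add_boost_lib(iostreams)            # creates target boost_iostreams_internal

# Consumers link the *_internal target explicitly, for example:
# target_link_libraries(some_target PRIVATE boost_iostreams_internal)
```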
1
contrib/brotli
vendored
Submodule
1
contrib/brotli
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 5805f99a533a8f8118699c0100d8c102f3605f65
|
33
contrib/brotli-cmake/CMakeLists.txt
Normal file
33
contrib/brotli-cmake/CMakeLists.txt
Normal file
@ -0,0 +1,33 @@
|
||||
set(BROTLI_SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/brotli/c)
|
||||
set(BROTLI_BINARY_DIR ${CMAKE_BINARY_DIR}/contrib/brotli/c)
|
||||
|
||||
set(SRCS
|
||||
${BROTLI_SOURCE_DIR}/dec/bit_reader.c
|
||||
${BROTLI_SOURCE_DIR}/dec/state.c
|
||||
${BROTLI_SOURCE_DIR}/dec/huffman.c
|
||||
${BROTLI_SOURCE_DIR}/dec/decode.c
|
||||
${BROTLI_SOURCE_DIR}/enc/encode.c
|
||||
${BROTLI_SOURCE_DIR}/enc/dictionary_hash.c
|
||||
${BROTLI_SOURCE_DIR}/enc/cluster.c
|
||||
${BROTLI_SOURCE_DIR}/enc/entropy_encode.c
|
||||
${BROTLI_SOURCE_DIR}/enc/literal_cost.c
|
||||
${BROTLI_SOURCE_DIR}/enc/compress_fragment_two_pass.c
|
||||
${BROTLI_SOURCE_DIR}/enc/static_dict.c
|
||||
${BROTLI_SOURCE_DIR}/enc/compress_fragment.c
|
||||
${BROTLI_SOURCE_DIR}/enc/block_splitter.c
|
||||
${BROTLI_SOURCE_DIR}/enc/backward_references_hq.c
|
||||
${BROTLI_SOURCE_DIR}/enc/histogram.c
|
||||
${BROTLI_SOURCE_DIR}/enc/brotli_bit_stream.c
|
||||
${BROTLI_SOURCE_DIR}/enc/utf8_util.c
|
||||
${BROTLI_SOURCE_DIR}/enc/encoder_dict.c
|
||||
${BROTLI_SOURCE_DIR}/enc/backward_references.c
|
||||
${BROTLI_SOURCE_DIR}/enc/bit_cost.c
|
||||
${BROTLI_SOURCE_DIR}/enc/metablock.c
|
||||
${BROTLI_SOURCE_DIR}/enc/memory.c
|
||||
${BROTLI_SOURCE_DIR}/common/dictionary.c
|
||||
${BROTLI_SOURCE_DIR}/common/transform.c
|
||||
)
|
||||
|
||||
add_library(brotli ${LINK_MODE} ${SRCS})
|
||||
|
||||
target_include_directories(brotli PUBLIC ${BROTLI_SOURCE_DIR}/include)
|
2
contrib/cppkafka
vendored
@@ -1 +1 @@
Subproject commit 860c90e92eee6690aa74a2ca7b7c5c6930dffecd
Subproject commit 9b184d881c15cc50784b28688c7c99d3d764db24
6
contrib/croaring/CMakeLists.txt
Normal file
@@ -0,0 +1,6 @@
add_library(roaring
roaring.c
roaring.h
roaring.hh)

target_include_directories (roaring PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
202
contrib/croaring/LICENSE
Normal file
@@ -0,0 +1,202 @@
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2016 The CRoaring authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
2
contrib/croaring/README.txt
Normal file
@@ -0,0 +1,2 @@
download from https://github.com/RoaringBitmap/CRoaring/archive/v0.2.57.tar.gz
and use ./amalgamation.sh generate
11093
contrib/croaring/roaring.c
Normal file
File diff suppressed because it is too large
7187
contrib/croaring/roaring.h
Normal file
File diff suppressed because it is too large
1732
contrib/croaring/roaring.hh
Normal file
File diff suppressed because it is too large
2
contrib/libhdfs3
vendored
@@ -1 +1 @@
Subproject commit bd6505cbb0c130b0db695305b9a38546fa880e5a
Subproject commit e2131aa752d7e95441e08f9a18304c1445f2576a
@ -36,7 +36,9 @@ set(PROTO_FILES
|
||||
${HDFS3_SOURCE_DIR}/proto/datatransfer.proto
|
||||
)
|
||||
|
||||
PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
|
||||
if(USE_PROTOBUF)
|
||||
PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
|
||||
endif()
|
||||
|
||||
configure_file(${HDFS3_SOURCE_DIR}/platform.h.in ${CMAKE_CURRENT_BINARY_DIR}/platform.h)
|
||||
|
||||
|
2
contrib/librdkafka
vendored
@@ -1 +1 @@
Subproject commit 363dcad5a23dc29381cc626620e68ae418b3af19
Subproject commit 73295a702cd1c85c11749ade500d713db7099cca
@ -51,10 +51,6 @@ set(SRCS
|
||||
${RDKAFKA_SOURCE_DIR}/snappy.c
|
||||
${RDKAFKA_SOURCE_DIR}/tinycthread.c
|
||||
${RDKAFKA_SOURCE_DIR}/tinycthread_extra.c
|
||||
${RDKAFKA_SOURCE_DIR}/xxhash.c
|
||||
${RDKAFKA_SOURCE_DIR}/lz4.c
|
||||
${RDKAFKA_SOURCE_DIR}/lz4frame.c
|
||||
${RDKAFKA_SOURCE_DIR}/lz4hc.c
|
||||
${RDKAFKA_SOURCE_DIR}/rdgz.c
|
||||
)
|
||||
|
||||
@ -62,4 +58,4 @@ add_library(rdkafka ${LINK_MODE} ${SRCS})
|
||||
target_include_directories(rdkafka SYSTEM PUBLIC include)
|
||||
target_include_directories(rdkafka SYSTEM PUBLIC ${RDKAFKA_SOURCE_DIR}) # Because weird logic with "include_next" is used.
|
||||
target_include_directories(rdkafka SYSTEM PRIVATE ${ZSTD_INCLUDE_DIR}/common) # Because wrong path to "zstd_errors.h" is used.
|
||||
target_link_libraries(rdkafka PUBLIC ${ZLIB_LIBRARIES} ${ZSTD_LIBRARY} ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
|
||||
target_link_libraries(rdkafka PUBLIC ${ZLIB_LIBRARIES} ${ZSTD_LIBRARY} ${LZ4_LIBRARY} ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
|
||||
|
@ -2,6 +2,8 @@
|
||||
#ifndef _CONFIG_H_
|
||||
#define _CONFIG_H_
|
||||
#define ARCH "x86_64"
|
||||
#define BUILT_WITH "GCC GXX PKGCONFIG OSXLD LIBDL PLUGINS ZLIB SSL SASL_CYRUS ZSTD HDRHISTOGRAM LZ4_EXT SNAPPY SOCKEM SASL_SCRAM CRC32C_HW"
|
||||
|
||||
#define CPU "generic"
|
||||
#define WITHOUT_OPTIMIZATION 0
|
||||
#define ENABLE_DEVEL 0
|
||||
|
@ -3,6 +3,10 @@ SET(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/lz4/lib)
|
||||
add_library (lz4
|
||||
${LIBRARY_DIR}/lz4.c
|
||||
${LIBRARY_DIR}/lz4hc.c
|
||||
${LIBRARY_DIR}/lz4frame.c
|
||||
${LIBRARY_DIR}/lz4frame.h
|
||||
${LIBRARY_DIR}/xxhash.c
|
||||
${LIBRARY_DIR}/xxhash.h
|
||||
|
||||
${LIBRARY_DIR}/lz4.h
|
||||
${LIBRARY_DIR}/lz4hc.h
|
||||
|
1
contrib/snappy
vendored
Submodule
@@ -0,0 +1 @@
Subproject commit 3f194acb57e0487531c96b97af61dcbd025a78a3
1
contrib/thrift
vendored
Submodule
@@ -0,0 +1 @@
Subproject commit 010ccf0a0c7023fea0f6bf4e4078ebdff7e61982
@ -58,9 +58,11 @@ if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
|
||||
|
||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8)
|
||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra-semi-stmt -Wshadow-field -Wstring-plus-int")
|
||||
endif ()
|
||||
|
||||
if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 9)
|
||||
if (WEVERYTHING)
|
||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-ctad-maybe-unsupported")
|
||||
endif ()
|
||||
endif ()
|
||||
endif ()
|
||||
@ -102,9 +104,6 @@ add_headers_and_sources(dbms src/Interpreters/ClusterProxy)
|
||||
add_headers_and_sources(dbms src/Columns)
|
||||
add_headers_and_sources(dbms src/Storages)
|
||||
add_headers_and_sources(dbms src/Storages/Distributed)
|
||||
if(USE_RDKAFKA)
|
||||
add_headers_and_sources(dbms src/Storages/Kafka)
|
||||
endif()
|
||||
add_headers_and_sources(dbms src/Storages/MergeTree)
|
||||
add_headers_and_sources(dbms src/Client)
|
||||
add_headers_and_sources(dbms src/Formats)
|
||||
@ -131,7 +130,9 @@ list (APPEND dbms_headers
|
||||
src/AggregateFunctions/parseAggregateFunctionParameters.h)
|
||||
|
||||
list (APPEND dbms_sources src/TableFunctions/ITableFunction.cpp src/TableFunctions/TableFunctionFactory.cpp)
|
||||
list (APPEND dbms_headers src/TableFunctions/ITableFunction.h src/TableFunctions/TableFunctionFactory.h)
|
||||
list (APPEND dbms_headers src/TableFunctions/ITableFunction.h src/TableFunctions/TableFunctionFactory.h)
|
||||
list (APPEND dbms_sources src/Dictionaries/DictionaryFactory.cpp src/Dictionaries/DictionarySourceFactory.cpp src/Dictionaries/DictionaryStructure.cpp)
|
||||
list (APPEND dbms_headers src/Dictionaries/DictionaryFactory.h src/Dictionaries/DictionarySourceFactory.h src/Dictionaries/DictionaryStructure.h)
|
||||
|
||||
add_library(clickhouse_common_io ${LINK_MODE} ${clickhouse_common_io_headers} ${clickhouse_common_io_sources})
|
||||
|
||||
@ -142,12 +143,10 @@ endif ()
|
||||
add_subdirectory(src/Common/ZooKeeper)
|
||||
add_subdirectory(src/Common/Config)
|
||||
|
||||
if (MAKE_STATIC_LIBRARIES)
|
||||
if (MAKE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES)
|
||||
add_library(dbms ${dbms_headers} ${dbms_sources})
|
||||
else ()
|
||||
add_library(dbms SHARED ${dbms_headers} ${dbms_sources})
|
||||
set_target_properties (dbms PROPERTIES SOVERSION ${VERSION_MAJOR}.${VERSION_MINOR} VERSION ${VERSION_SO} OUTPUT_NAME clickhouse)
|
||||
install (TARGETS dbms LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
|
||||
if (USE_EMBEDDED_COMPILER)
|
||||
@ -185,7 +184,9 @@ target_link_libraries (clickhouse_common_io
|
||||
string_utils
|
||||
widechar_width
|
||||
${LINK_LIBRARIES_ONLY_ON_X86_64}
|
||||
PUBLIC
|
||||
${DOUBLE_CONVERSION_LIBRARIES}
|
||||
PRIVATE
|
||||
pocoext
|
||||
PUBLIC
|
||||
${Poco_Net_LIBRARY}
|
||||
@ -206,9 +207,10 @@ target_link_libraries (clickhouse_common_io
|
||||
Threads::Threads
|
||||
PRIVATE
|
||||
${CMAKE_DL_LIBS}
|
||||
PUBLIC
|
||||
roaring
|
||||
)
|
||||
|
||||
target_include_directories(clickhouse_common_io SYSTEM BEFORE PUBLIC ${PDQSORT_INCLUDE_DIR})
|
||||
|
||||
target_include_directories(clickhouse_common_io SYSTEM BEFORE PUBLIC ${RE2_INCLUDE_DIR})
|
||||
|
||||
@ -228,7 +230,6 @@ target_link_libraries (dbms
|
||||
PUBLIC
|
||||
clickhouse_common_io
|
||||
PRIVATE
|
||||
clickhouse_dictionaries
|
||||
clickhouse_dictionaries_embedded
|
||||
PUBLIC
|
||||
pocoext
|
||||
@ -241,6 +242,7 @@ target_link_libraries (dbms
|
||||
Threads::Threads
|
||||
)
|
||||
|
||||
target_include_directories(dbms SYSTEM BEFORE PUBLIC ${PDQSORT_INCLUDE_DIR})
|
||||
|
||||
if (NOT USE_INTERNAL_BOOST_LIBRARY)
|
||||
target_include_directories (clickhouse_common_io SYSTEM BEFORE PUBLIC ${Boost_INCLUDE_DIRS})
|
||||
@ -297,10 +299,13 @@ if (USE_CAPNP)
|
||||
endif ()
|
||||
|
||||
if (USE_RDKAFKA)
|
||||
target_link_libraries (dbms PRIVATE ${RDKAFKA_LIBRARY})
|
||||
target_link_libraries (dbms PRIVATE ${CPPKAFKA_LIBRARY})
|
||||
if (NOT USE_INTERNAL_RDKAFKA_LIBRARY)
|
||||
target_include_directories (dbms SYSTEM BEFORE PRIVATE ${RDKAFKA_INCLUDE_DIR})
|
||||
target_link_libraries (dbms PRIVATE clickhouse_storage_kafka)
|
||||
endif ()
|
||||
|
||||
if (USE_PARQUET)
|
||||
target_link_libraries(dbms PRIVATE ${PARQUET_LIBRARY})
|
||||
if (NOT USE_INTERNAL_PARQUET_LIBRARY OR USE_INTERNAL_PARQUET_LIBRARY_NATIVE_CMAKE)
|
||||
target_include_directories (dbms SYSTEM BEFORE PRIVATE ${PARQUET_INCLUDE_DIR} ${ARROW_INCLUDE_DIR})
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
@ -319,6 +324,11 @@ if (USE_HDFS)
|
||||
target_include_directories (clickhouse_common_io SYSTEM BEFORE PRIVATE ${HDFS3_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
if (USE_BROTLI)
|
||||
target_link_libraries (clickhouse_common_io PRIVATE ${BROTLI_LIBRARY})
|
||||
target_include_directories (clickhouse_common_io SYSTEM BEFORE PRIVATE ${BROTLI_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
if (USE_JEMALLOC)
|
||||
target_include_directories (dbms SYSTEM BEFORE PRIVATE ${JEMALLOC_INCLUDE_DIR}) # used in Interpreters/AsynchronousMetrics.cpp
|
||||
endif ()
|
||||
@ -343,6 +353,6 @@ if (ENABLE_TESTS AND USE_GTEST)
|
||||
# attach all dbms gtest sources
|
||||
grep_gtest_sources(${ClickHouse_SOURCE_DIR}/dbms dbms_gtest_sources)
|
||||
add_executable(unit_tests_dbms ${dbms_gtest_sources})
|
||||
target_link_libraries(unit_tests_dbms PRIVATE gtest_main dbms clickhouse_common_zookeeper)
|
||||
target_link_libraries(unit_tests_dbms PRIVATE ${GTEST_BOTH_LIBRARIES} dbms clickhouse_common_zookeeper)
|
||||
add_check(unit_tests_dbms)
|
||||
endif ()
|
||||
|
@ -21,7 +21,7 @@ SELECT UserID FROM {table} WHERE UserID = 12345678901234567890;
|
||||
SELECT count() FROM {table} WHERE URL LIKE '%metrika%';
|
||||
SELECT SearchPhrase, any(URL), count() AS c FROM {table} WHERE URL LIKE '%metrika%' AND SearchPhrase != '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10;
|
||||
SELECT SearchPhrase, any(URL), any(Title), count() AS c, uniq(UserID) FROM {table} WHERE Title LIKE '%Яндекс%' AND URL NOT LIKE '%.yandex.%' AND SearchPhrase != '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10;
|
||||
SELECT * FROM {table} PREWHERE URL LIKE '%metrika%' ORDER BY EventTime LIMIT 10;
|
||||
SELECT * FROM {table} WHERE URL LIKE '%metrika%' ORDER BY EventTime LIMIT 10;
|
||||
SELECT SearchPhrase FROM {table} WHERE SearchPhrase != '' ORDER BY EventTime LIMIT 10;
|
||||
SELECT SearchPhrase FROM {table} WHERE SearchPhrase != '' ORDER BY SearchPhrase LIMIT 10;
|
||||
SELECT SearchPhrase FROM {table} WHERE SearchPhrase != '' ORDER BY EventTime, SearchPhrase LIMIT 10;
|
||||
|
@ -1,11 +1,11 @@
|
||||
# These strings are auto-changed by release_lib.sh:
|
||||
set(VERSION_REVISION 54415)
|
||||
set(VERSION_REVISION 54417)
|
||||
set(VERSION_MAJOR 19)
|
||||
set(VERSION_MINOR 3)
|
||||
set(VERSION_PATCH 0)
|
||||
set(VERSION_GITHASH 1db4bd8c2a1a0cd610c8a6564e8194dca5265562)
|
||||
set(VERSION_DESCRIBE v19.3.0-testing)
|
||||
set(VERSION_STRING 19.3.0)
|
||||
set(VERSION_MINOR 5)
|
||||
set(VERSION_PATCH 1)
|
||||
set(VERSION_GITHASH 628ed349c335b79a441a1bd6e4bc791d61dfe62c)
|
||||
set(VERSION_DESCRIBE v19.5.1.1-testing)
|
||||
set(VERSION_STRING 19.5.1.1)
|
||||
# end of autochange
|
||||
|
||||
set(VERSION_EXTRA "" CACHE STRING "")
|
||||
|
@ -7,7 +7,7 @@ option (ENABLE_CLICKHOUSE_SERVER "Enable clickhouse-server" ${ENABLE_CLICKHOUSE_
|
||||
option (ENABLE_CLICKHOUSE_CLIENT "Enable clickhouse-client" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_LOCAL "Enable clickhouse-local" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_BENCHMARK "Enable clickhouse-benchmark" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_PERFORMANCE "Enable clickhouse-performance-test" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_PERFORMANCE_TEST "Enable clickhouse-performance-test" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG "Enable clickhouse-extract-from-config" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_COMPRESSOR "Enable clickhouse-compressor" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_COPIER "Enable clickhouse-copier" ${ENABLE_CLICKHOUSE_ALL})
|
||||
@ -15,8 +15,63 @@ option (ENABLE_CLICKHOUSE_FORMAT "Enable clickhouse-format" ${ENABLE_CLICKHOUSE_
|
||||
option (ENABLE_CLICKHOUSE_OBFUSCATOR "Enable clickhouse-obfuscator" ${ENABLE_CLICKHOUSE_ALL})
|
||||
option (ENABLE_CLICKHOUSE_ODBC_BRIDGE "Enable clickhouse-odbc-bridge" ${ENABLE_CLICKHOUSE_ALL})
|
||||
|
||||
if(NOT (MAKE_STATIC_LIBRARIES OR SPLIT_SHARED_LIBRARIES))
|
||||
set(CLICKHOUSE_ONE_SHARED 1)
|
||||
endif()
|
||||
|
||||
configure_file (config_tools.h.in ${CMAKE_CURRENT_BINARY_DIR}/config_tools.h)
|
||||
|
||||
|
||||
macro(clickhouse_target_link_split_lib target name)
|
||||
if(NOT CLICKHOUSE_ONE_SHARED)
|
||||
target_link_libraries(${target} PRIVATE clickhouse-${name}-lib)
|
||||
else()
|
||||
target_link_libraries(${target} PRIVATE clickhouse-lib)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(clickhouse_program_link_split_binary name)
|
||||
clickhouse_target_link_split_lib(clickhouse-${name} ${name})
|
||||
endmacro()
|
||||
|
||||
macro(clickhouse_program_add_library name)
|
||||
string(TOUPPER ${name} name_uc)
|
||||
string(REPLACE "-" "_" name_uc ${name_uc})
|
||||
|
||||
# Some dark magic
|
||||
set(CLICKHOUSE_${name_uc}_SOURCES ${CLICKHOUSE_${name_uc}_SOURCES} PARENT_SCOPE)
|
||||
set(CLICKHOUSE_${name_uc}_LINK ${CLICKHOUSE_${name_uc}_LINK} PARENT_SCOPE)
|
||||
set(CLICKHOUSE_${name_uc}_INCLUDE ${CLICKHOUSE_${name_uc}_INCLUDE} PARENT_SCOPE)
|
||||
|
||||
if(NOT CLICKHOUSE_ONE_SHARED)
|
||||
add_library(clickhouse-${name}-lib ${LINK_MODE} ${CLICKHOUSE_${name_uc}_SOURCES})
|
||||
|
||||
set(_link ${CLICKHOUSE_${name_uc}_LINK}) # can't use ${} in if()
|
||||
if(_link)
|
||||
target_link_libraries(clickhouse-${name}-lib ${CLICKHOUSE_${name_uc}_LINK})
|
||||
endif()
|
||||
|
||||
set(_include ${CLICKHOUSE_${name_uc}_INCLUDE}) # can't use ${} in if()
|
||||
if (_include)
|
||||
target_include_directories(clickhouse-${name}-lib ${CLICKHOUSE_${name_uc}_INCLUDE})
|
||||
endif()
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(clickhouse_program_add_executable name)
|
||||
if(CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable(clickhouse-${name} clickhouse-${name}.cpp)
|
||||
clickhouse_program_link_split_binary(${name})
|
||||
install(TARGETS clickhouse-${name} ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
macro(clickhouse_program_add name)
|
||||
clickhouse_program_add_library(${name})
|
||||
clickhouse_program_add_executable(${name})
|
||||
endmacro()
|
||||
|
||||
|
||||
add_subdirectory (server)
|
||||
add_subdirectory (client)
|
||||
add_subdirectory (local)
|
||||
@ -33,6 +88,13 @@ if (ENABLE_CLICKHOUSE_ODBC_BRIDGE)
|
||||
add_subdirectory (odbc-bridge)
|
||||
endif ()
|
||||
|
||||
if (CLICKHOUSE_ONE_SHARED)
|
||||
add_library(clickhouse-lib SHARED ${CLICKHOUSE_SERVER_SOURCES} ${CLICKHOUSE_CLIENT_SOURCES} ${CLICKHOUSE_LOCAL_SOURCES} ${CLICKHOUSE_BENCHMARK_SOURCES} ${CLICKHOUSE_PERFORMANCE_TEST_SOURCES} ${CLICKHOUSE_COPIER_SOURCES} ${CLICKHOUSE_EXTRACT_FROM_CONFIG_SOURCES} ${CLICKHOUSE_COMPRESSOR_SOURCES} ${CLICKHOUSE_FORMAT_SOURCES} ${CLICKHOUSE_OBFUSCATOR_SOURCES} ${CLICKHOUSE_COMPILER_SOURCES} ${CLICKHOUSE_ODBC_BRIDGE_SOURCES})
|
||||
target_link_libraries(clickhouse-lib ${CLICKHOUSE_SERVER_LINK} ${CLICKHOUSE_CLIENT_LINK} ${CLICKHOUSE_LOCAL_LINK} ${CLICKHOUSE_BENCHMARK_LINK} ${CLICKHOUSE_PERFORMANCE_TEST_LINK} ${CLICKHOUSE_COPIER_LINK} ${CLICKHOUSE_EXTRACT_FROM_CONFIG_LINK} ${CLICKHOUSE_COMPRESSOR_LINK} ${CLICKHOUSE_FORMAT_LINK} ${CLICKHOUSE_OBFUSCATOR_LINK} ${CLICKHOUSE_COMPILER_LINK} ${CLICKHOUSE_ODBC_BRIDGE_LINK})
|
||||
target_include_directories(clickhouse-lib ${CLICKHOUSE_SERVER_INCLUDE} ${CLICKHOUSE_CLIENT_INCLUDE} ${CLICKHOUSE_LOCAL_INCLUDE} ${CLICKHOUSE_BENCHMARK_INCLUDE} ${CLICKHOUSE_PERFORMANCE_TEST_INCLUDE} ${CLICKHOUSE_COPIER_INCLUDE} ${CLICKHOUSE_EXTRACT_FROM_CONFIG_INCLUDE} ${CLICKHOUSE_COMPRESSOR_INCLUDE} ${CLICKHOUSE_FORMAT_INCLUDE} ${CLICKHOUSE_OBFUSCATOR_INCLUDE} ${CLICKHOUSE_COMPILER_INCLUDE} ${CLICKHOUSE_ODBC_BRIDGE_INCLUDE})
|
||||
set_target_properties(clickhouse-lib PROPERTIES SOVERSION ${VERSION_MAJOR}.${VERSION_MINOR} VERSION ${VERSION_SO} OUTPUT_NAME clickhouse DEBUG_POSTFIX "")
|
||||
endif()
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
set (CLICKHOUSE_ALL_TARGETS clickhouse-server clickhouse-client clickhouse-local clickhouse-benchmark clickhouse-performance-test
|
||||
clickhouse-extract-from-config clickhouse-compressor clickhouse-format clickhouse-copier)
|
||||
@ -49,6 +111,8 @@ if (CLICKHOUSE_SPLIT_BINARY)
|
||||
|
||||
add_custom_target (clickhouse-bundle ALL DEPENDS ${CLICKHOUSE_ALL_TARGETS})
|
||||
add_custom_target (clickhouse ALL DEPENDS clickhouse-bundle)
|
||||
|
||||
install(PROGRAMS clickhouse-split-helper DESTINATION ${CMAKE_INSTALL_BINDIR} RENAME clickhouse COMPONENT clickhouse)
|
||||
else ()
|
||||
if (USE_EMBEDDED_COMPILER)
|
||||
# before add_executable !
|
||||
@ -60,37 +124,37 @@ else ()
|
||||
target_include_directories (clickhouse PRIVATE ${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
||||
if (ENABLE_CLICKHOUSE_SERVER)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-server-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse server)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_CLIENT)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-client-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse client)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_LOCAL)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-local-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse local)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_BENCHMARK)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-benchmark-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse benchmark)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_PERFORMANCE)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-performance-test-lib)
|
||||
if (ENABLE_CLICKHOUSE_PERFORMANCE_TEST)
|
||||
clickhouse_target_link_split_lib(clickhouse performance-test)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_COPIER)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-copier-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse copier)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-extract-from-config-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse extract-from-config)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_COMPRESSOR)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-compressor-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse compressor)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_FORMAT)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-format-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse format)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_OBFUSCATOR)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-obfuscator-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse obfuscator)
|
||||
endif ()
|
||||
if (USE_EMBEDDED_COMPILER)
|
||||
target_link_libraries (clickhouse PRIVATE clickhouse-compiler-lib)
|
||||
clickhouse_target_link_split_lib(clickhouse compiler)
|
||||
endif ()
|
||||
|
||||
set (CLICKHOUSE_BUNDLE)
|
||||
@ -114,7 +178,7 @@ else ()
|
||||
install (FILES ${CMAKE_CURRENT_BINARY_DIR}/clickhouse-benchmark DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
list(APPEND CLICKHOUSE_BUNDLE clickhouse-benchmark)
|
||||
endif ()
|
||||
if (ENABLE_CLICKHOUSE_PERFORMANCE)
|
||||
if (ENABLE_CLICKHOUSE_PERFORMANCE_TEST)
|
||||
add_custom_target (clickhouse-performance-test ALL COMMAND ${CMAKE_COMMAND} -E create_symlink clickhouse clickhouse-performance-test DEPENDS clickhouse)
|
||||
install (FILES ${CMAKE_CURRENT_BINARY_DIR}/clickhouse-performance-test DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
list(APPEND CLICKHOUSE_BUNDLE clickhouse-performance-test)
|
||||
|
@ -1,9 +1,9 @@
|
||||
add_library (clickhouse-benchmark-lib ${LINK_MODE} Benchmark.cpp)
|
||||
target_link_libraries (clickhouse-benchmark-lib PRIVATE clickhouse_aggregate_functions clickhouse-client-lib clickhouse_common_config clickhouse_common_io ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
target_include_directories (clickhouse-benchmark-lib SYSTEM PRIVATE ${PCG_RANDOM_INCLUDE_DIR})
|
||||
set(CLICKHOUSE_BENCHMARK_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/Benchmark.cpp)
|
||||
set(CLICKHOUSE_BENCHMARK_LINK PRIVATE clickhouse_aggregate_functions clickhouse_common_config clickhouse_common_io ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
set(CLICKHOUSE_BENCHMARK_INCLUDE SYSTEM PRIVATE ${PCG_RANDOM_INCLUDE_DIR})
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-benchmark clickhouse-benchmark.cpp)
|
||||
target_link_libraries (clickhouse-benchmark PRIVATE clickhouse-benchmark-lib clickhouse_aggregate_functions)
|
||||
install (TARGETS clickhouse-benchmark ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
clickhouse_program_add(benchmark)
|
||||
|
||||
if(NOT CLICKHOUSE_ONE_SHARED)
|
||||
target_link_libraries (clickhouse-benchmark-lib PRIVATE clickhouse-client-lib)
|
||||
endif()
|
||||
|
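The benchmark build file above is the first user of the `clickhouse_program_add()` helper introduced in `dbms/programs/CMakeLists.txt`: a tool declares its `CLICKHOUSE_<NAME>_SOURCES`, `_LINK` and `_INCLUDE` variables and calls the macro, which creates the `clickhouse-<name>-lib` library (unless everything is built into one shared `clickhouse` library) and, in split-binary mode, the standalone executable. A sketch for a hypothetical tool `mytool`, not part of the commit:

```cmake
# Hypothetical dbms/programs/mytool/CMakeLists.txt using the new helpers.
set(CLICKHOUSE_MYTOOL_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/MyTool.cpp)
set(CLICKHOUSE_MYTOOL_LINK PRIVATE clickhouse_common_io ${Boost_PROGRAM_OPTIONS_LIBRARY})
set(CLICKHOUSE_MYTOOL_INCLUDE SYSTEM PRIVATE ${PCG_RANDOM_INCLUDE_DIR})   # optional extra includes

clickhouse_program_add(mytool)   # registers clickhouse-mytool-lib / clickhouse-mytool
```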
6
dbms/programs/clickhouse-split-helper
Executable file
@@ -0,0 +1,6 @@
#!/bin/sh

set -e
CMD=$1
shift
clickhouse-$CMD $*
@ -1,13 +1,7 @@
|
||||
add_library (clickhouse-client-lib ${LINK_MODE} Client.cpp)
|
||||
target_link_libraries (clickhouse-client-lib PRIVATE clickhouse_common_config clickhouse_functions clickhouse_aggregate_functions clickhouse_common_io ${LINE_EDITING_LIBS} ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
if (READLINE_INCLUDE_DIR)
|
||||
target_include_directories (clickhouse-client-lib SYSTEM PRIVATE ${READLINE_INCLUDE_DIR})
|
||||
endif ()
|
||||
set(CLICKHOUSE_CLIENT_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/Client.cpp)
|
||||
set(CLICKHOUSE_CLIENT_LINK PRIVATE clickhouse_common_config clickhouse_functions clickhouse_aggregate_functions clickhouse_common_io ${LINE_EDITING_LIBS} ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
set(CLICKHOUSE_CLIENT_INCLUDE SYSTEM PRIVATE ${READLINE_INCLUDE_DIR})
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-client clickhouse-client.cpp)
|
||||
target_link_libraries (clickhouse-client PRIVATE clickhouse-client-lib)
|
||||
install (TARGETS clickhouse-client ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
clickhouse_program_add(client)
|
||||
|
||||
install (FILES clickhouse-client.xml DESTINATION ${CLICKHOUSE_ETC_DIR}/clickhouse-client COMPONENT clickhouse-client RENAME config.xml)
|
||||
|
@ -704,7 +704,7 @@ private:
|
||||
return true;
|
||||
}
|
||||
|
||||
ASTInsertQuery * insert = typeid_cast<ASTInsertQuery *>(ast.get());
|
||||
auto * insert = ast->as<ASTInsertQuery>();
|
||||
|
||||
if (insert && insert->data)
|
||||
{
|
||||
@ -799,14 +799,11 @@ private:
|
||||
written_progress_chars = 0;
|
||||
written_first_block = false;
|
||||
|
||||
const ASTSetQuery * set_query = typeid_cast<const ASTSetQuery *>(&*parsed_query);
|
||||
const ASTUseQuery * use_query = typeid_cast<const ASTUseQuery *>(&*parsed_query);
|
||||
/// INSERT query for which data transfer is needed (not an INSERT SELECT) is processed separately.
|
||||
const ASTInsertQuery * insert = typeid_cast<const ASTInsertQuery *>(&*parsed_query);
|
||||
|
||||
connection->forceConnected();
|
||||
|
||||
if (insert && !insert->select)
|
||||
/// INSERT query for which data transfer is needed (not an INSERT SELECT) is processed separately.
|
||||
const auto * insert_query = parsed_query->as<ASTInsertQuery>();
|
||||
if (insert_query && !insert_query->select)
|
||||
processInsertQuery();
|
||||
else
|
||||
processOrdinaryQuery();
|
||||
@ -814,7 +811,7 @@ private:
|
||||
/// Do not change context (current DB, settings) in case of an exception.
|
||||
if (!got_exception)
|
||||
{
|
||||
if (set_query)
|
||||
if (const auto * set_query = parsed_query->as<ASTSetQuery>())
|
||||
{
|
||||
/// Save all changes in settings to avoid losing them if the connection is lost.
|
||||
for (const auto & change : set_query->changes)
|
||||
@ -826,7 +823,7 @@ private:
|
||||
}
|
||||
}
|
||||
|
||||
if (use_query)
|
||||
if (const auto * use_query = parsed_query->as<ASTUseQuery>())
|
||||
{
|
||||
const String & new_database = use_query->database;
|
||||
/// If the client initiates the reconnection, it takes the settings from the config.
|
||||
@ -858,7 +855,7 @@ private:
|
||||
/// Convert external tables to ExternalTableData and send them using the connection.
|
||||
void sendExternalTables()
|
||||
{
|
||||
auto * select = typeid_cast<const ASTSelectWithUnionQuery *>(&*parsed_query);
|
||||
const auto * select = parsed_query->as<ASTSelectWithUnionQuery>();
|
||||
if (!select && !external_tables.empty())
|
||||
throw Exception("External tables could be sent only with select query", ErrorCodes::BAD_ARGUMENTS);
|
||||
|
||||
@ -883,7 +880,7 @@ private:
|
||||
void processInsertQuery()
|
||||
{
|
||||
/// Send part of query without data, because data will be sent separately.
|
||||
const ASTInsertQuery & parsed_insert_query = typeid_cast<const ASTInsertQuery &>(*parsed_query);
|
||||
const auto & parsed_insert_query = parsed_query->as<ASTInsertQuery &>();
|
||||
String query_without_data = parsed_insert_query.data
|
||||
? query.substr(0, parsed_insert_query.data - query.data())
|
||||
: query;
|
||||
@ -940,7 +937,7 @@ private:
|
||||
void sendData(Block & sample, const ColumnsDescription & columns_description)
|
||||
{
|
||||
/// If INSERT data must be sent.
|
||||
const ASTInsertQuery * parsed_insert_query = typeid_cast<const ASTInsertQuery *>(&*parsed_query);
|
||||
const auto * parsed_insert_query = parsed_query->as<ASTInsertQuery>();
|
||||
if (!parsed_insert_query)
|
||||
return;
|
||||
|
||||
@ -965,9 +962,13 @@ private:
|
||||
String current_format = insert_format;
|
||||
|
||||
/// Data format can be specified in the INSERT query.
|
||||
if (ASTInsertQuery * insert = typeid_cast<ASTInsertQuery *>(&*parsed_query))
if (const auto * insert = parsed_query->as<ASTInsertQuery>())
{
if (!insert->format.empty())
current_format = insert->format;
if (insert->settings_ast)
InterpreterSetQuery(insert->settings_ast, context).executeForCurrentContext();
}
BlockInputStreamPtr block_input = context.getInputFormat(
current_format, buf, sample, insert_format_max_block_size);
@ -1227,12 +1228,14 @@ private:
String current_format = format;
/// The query can specify output format or output file.
if (ASTQueryWithOutput * query_with_output = dynamic_cast<ASTQueryWithOutput *>(&*parsed_query))
/// FIXME: try to prettify this cast using `as<>()`
if (const auto * query_with_output = dynamic_cast<const ASTQueryWithOutput *>(parsed_query.get()))
{
if (query_with_output->out_file != nullptr)
if (query_with_output->out_file)
{
const auto & out_file_node = typeid_cast<const ASTLiteral &>(*query_with_output->out_file);
const auto & out_file_node = query_with_output->out_file->as<ASTLiteral &>();
const auto & out_file = out_file_node.value.safeGet<std::string>();
out_file_buf.emplace(out_file, DBMS_DEFAULT_BUFFER_SIZE, O_WRONLY | O_EXCL | O_CREAT);
out_buf = &*out_file_buf;
@ -1244,7 +1247,7 @@ private:
{
if (has_vertical_output_suffix)
throw Exception("Output format already specified", ErrorCodes::CLIENT_OUTPUT_FORMAT_SPECIFIED);
const auto & id = typeid_cast<const ASTIdentifier &>(*query_with_output->format);
const auto & id = query_with_output->format->as<ASTIdentifier &>();
current_format = id.name;
}
if (query_with_output->settings_ast)
@ -1,9 +1,7 @@
|
||||
add_library (clickhouse-compressor-lib ${LINK_MODE} Compressor.cpp)
|
||||
target_link_libraries (clickhouse-compressor-lib PRIVATE clickhouse_compression clickhouse_common_io ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
# Also in utils
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
# Also in utils
|
||||
add_executable (clickhouse-compressor clickhouse-compressor.cpp)
|
||||
target_link_libraries (clickhouse-compressor PRIVATE clickhouse-compressor-lib)
|
||||
install (TARGETS clickhouse-compressor ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
set(CLICKHOUSE_COMPRESSOR_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/Compressor.cpp)
|
||||
set(CLICKHOUSE_COMPRESSOR_LINK PRIVATE clickhouse_compression clickhouse_common_io ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
#set(CLICKHOUSE_COMPRESSOR_INCLUDE SYSTEM PRIVATE ...)
|
||||
|
||||
clickhouse_program_add(compressor)
|
||||
|
@ -6,7 +6,7 @@
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_CLIENT
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_LOCAL
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_BENCHMARK
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_PERFORMANCE
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_PERFORMANCE_TEST
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_COPIER
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG
|
||||
#cmakedefine01 ENABLE_CLICKHOUSE_COMPRESSOR
|
||||
|
@ -1,8 +1,5 @@
|
||||
add_library (clickhouse-copier-lib ${LINK_MODE} ClusterCopier.cpp)
|
||||
target_link_libraries (clickhouse-copier-lib PRIVATE clickhouse-server-lib clickhouse_functions clickhouse_aggregate_functions daemon)
|
||||
set(CLICKHOUSE_COPIER_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/ClusterCopier.cpp)
|
||||
set(CLICKHOUSE_COPIER_LINK PRIVATE clickhouse_functions clickhouse_table_functions clickhouse_aggregate_functions PUBLIC daemon)
|
||||
set(CLICKHOUSE_COPIER_INCLUDE SYSTEM PRIVATE ${PCG_RANDOM_INCLUDE_DIR})
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-copier clickhouse-copier.cpp)
|
||||
target_link_libraries (clickhouse-copier clickhouse-copier-lib)
|
||||
install (TARGETS clickhouse-copier ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
clickhouse_program_add(copier)
|
||||
|
@ -33,6 +33,7 @@
|
||||
#include <Common/CurrentThread.h>
|
||||
#include <Common/escapeForFileName.h>
|
||||
#include <Common/getNumberOfPhysicalCPUCores.h>
|
||||
#include <Common/ThreadStatus.h>
|
||||
#include <Client/Connection.h>
|
||||
#include <Interpreters/Context.h>
|
||||
#include <Interpreters/Cluster.h>
|
||||
@ -482,7 +483,7 @@ String DB::TaskShard::getHostNameExample() const
|
||||
|
||||
static bool isExtendedDefinitionStorage(const ASTPtr & storage_ast)
|
||||
{
|
||||
const ASTStorage & storage = typeid_cast<const ASTStorage &>(*storage_ast);
|
||||
const auto & storage = storage_ast->as<ASTStorage &>();
|
||||
return storage.partition_by || storage.order_by || storage.sample_by;
|
||||
}
|
||||
|
||||
@ -490,8 +491,8 @@ static ASTPtr extractPartitionKey(const ASTPtr & storage_ast)
|
||||
{
|
||||
String storage_str = queryToString(storage_ast);
|
||||
|
||||
const ASTStorage & storage = typeid_cast<const ASTStorage &>(*storage_ast);
|
||||
const ASTFunction & engine = typeid_cast<const ASTFunction &>(*storage.engine);
|
||||
const auto & storage = storage_ast->as<ASTStorage &>();
|
||||
const auto & engine = storage.engine->as<ASTFunction &>();
|
||||
|
||||
if (!endsWith(engine.name, "MergeTree"))
|
||||
{
|
||||
@ -500,7 +501,7 @@ static ASTPtr extractPartitionKey(const ASTPtr & storage_ast)
|
||||
}
|
||||
|
||||
ASTPtr arguments_ast = engine.arguments->clone();
|
||||
ASTs & arguments = typeid_cast<ASTExpressionList &>(*arguments_ast).children;
|
||||
ASTs & arguments = arguments_ast->children;
|
||||
|
||||
if (isExtendedDefinitionStorage(storage_ast))
|
||||
{
|
||||
@ -1178,12 +1179,12 @@ protected:
|
||||
/// Removes MATERIALIZED and ALIAS columns from create table query
|
||||
static ASTPtr removeAliasColumnsFromCreateQuery(const ASTPtr & query_ast)
|
||||
{
|
||||
const ASTs & column_asts = typeid_cast<ASTCreateQuery &>(*query_ast).columns_list->columns->children;
|
||||
const ASTs & column_asts = query_ast->as<ASTCreateQuery &>().columns_list->columns->children;
|
||||
auto new_columns = std::make_shared<ASTExpressionList>();
|
||||
|
||||
for (const ASTPtr & column_ast : column_asts)
|
||||
{
|
||||
const ASTColumnDeclaration & column = typeid_cast<const ASTColumnDeclaration &>(*column_ast);
|
||||
const auto & column = column_ast->as<ASTColumnDeclaration &>();
|
||||
|
||||
if (!column.default_specifier.empty())
|
||||
{
|
||||
@ -1196,12 +1197,11 @@ protected:
|
||||
}
|
||||
|
||||
ASTPtr new_query_ast = query_ast->clone();
|
||||
ASTCreateQuery & new_query = typeid_cast<ASTCreateQuery &>(*new_query_ast);
|
||||
auto & new_query = new_query_ast->as<ASTCreateQuery &>();
|
||||
|
||||
auto new_columns_list = std::make_shared<ASTColumns>();
|
||||
new_columns_list->set(new_columns_list->columns, new_columns);
|
||||
new_columns_list->set(
|
||||
new_columns_list->indices, typeid_cast<ASTCreateQuery &>(*query_ast).columns_list->indices->clone());
|
||||
new_columns_list->set(new_columns_list->indices, query_ast->as<ASTCreateQuery>()->columns_list->indices->clone());
|
||||
|
||||
new_query.replace(new_query.columns_list, new_columns_list);
|
||||
|
||||
@ -1211,7 +1211,7 @@ protected:
|
||||
/// Replaces ENGINE and table name in a create query
|
||||
std::shared_ptr<ASTCreateQuery> rewriteCreateQueryStorage(const ASTPtr & create_query_ast, const DatabaseAndTableName & new_table, const ASTPtr & new_storage_ast)
|
||||
{
|
||||
ASTCreateQuery & create = typeid_cast<ASTCreateQuery &>(*create_query_ast);
|
||||
const auto & create = create_query_ast->as<ASTCreateQuery &>();
|
||||
auto res = std::make_shared<ASTCreateQuery>(create);
|
||||
|
||||
if (create.storage == nullptr || new_storage_ast == nullptr)
|
||||
@ -1645,7 +1645,7 @@ protected:
|
||||
/// Try create table (if not exists) on each shard
|
||||
{
|
||||
auto create_query_push_ast = rewriteCreateQueryStorage(task_shard.current_pull_table_create_query, task_table.table_push, task_table.engine_push_ast);
|
||||
typeid_cast<ASTCreateQuery &>(*create_query_push_ast).if_not_exists = true;
|
||||
create_query_push_ast->as<ASTCreateQuery &>().if_not_exists = true;
|
||||
String query = queryToString(create_query_push_ast);
|
||||
|
||||
LOG_DEBUG(log, "Create destination tables. Query: " << query);
|
||||
@ -1778,7 +1778,7 @@ protected:
|
||||
|
||||
void dropAndCreateLocalTable(const ASTPtr & create_ast)
|
||||
{
|
||||
auto & create = typeid_cast<ASTCreateQuery &>(*create_ast);
|
||||
const auto & create = create_ast->as<ASTCreateQuery &>();
|
||||
dropLocalTableIfExists({create.database, create.table});
|
||||
|
||||
InterpreterCreateQuery interpreter(create_ast, context);
|
||||
@ -2121,6 +2121,7 @@ void ClusterCopierApp::defineOptions(Poco::Util::OptionSet & options)
|
||||
void ClusterCopierApp::mainImpl()
|
||||
{
|
||||
StatusFile status_file(process_path + "/status");
|
||||
ThreadStatus thread_status;
|
||||
|
||||
auto log = &logger();
|
||||
LOG_INFO(log, "Starting clickhouse-copier ("
|
||||
|
@ -1,8 +1,5 @@
|
||||
add_library (clickhouse-extract-from-config-lib ${LINK_MODE} ExtractFromConfig.cpp)
|
||||
target_link_libraries (clickhouse-extract-from-config-lib PRIVATE clickhouse_common_config clickhouse_common_io ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
set(CLICKHOUSE_EXTRACT_FROM_CONFIG_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/ExtractFromConfig.cpp)
|
||||
set(CLICKHOUSE_EXTRACT_FROM_CONFIG_LINK PRIVATE clickhouse_common_config clickhouse_common_io ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
#set(CLICKHOUSE_EXTRACT_FROM_CONFIG_INCLUDE SYSTEM PRIVATE ...)
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-extract-from-config clickhouse-extract-from-config.cpp)
|
||||
target_link_libraries (clickhouse-extract-from-config PRIVATE clickhouse-extract-from-config-lib)
|
||||
install (TARGETS clickhouse-extract-from-config ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
clickhouse_program_add(extract-from-config)
|
||||
|
@ -1,7 +1,5 @@
|
||||
add_library (clickhouse-format-lib ${LINK_MODE} Format.cpp)
|
||||
target_link_libraries (clickhouse-format-lib PRIVATE dbms clickhouse_common_io clickhouse_parsers ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-format clickhouse-format.cpp)
|
||||
target_link_libraries (clickhouse-format PRIVATE clickhouse-format-lib)
|
||||
install (TARGETS clickhouse-format ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
set(CLICKHOUSE_FORMAT_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/Format.cpp)
|
||||
set(CLICKHOUSE_FORMAT_LINK PRIVATE dbms clickhouse_common_io clickhouse_parsers ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
#set(CLICKHOUSE_FORMAT_INCLUDE SYSTEM PRIVATE ...)
|
||||
|
||||
clickhouse_program_add(format)
|
||||
|
@ -1,8 +1,9 @@
|
||||
add_library (clickhouse-local-lib ${LINK_MODE} LocalServer.cpp)
|
||||
target_link_libraries (clickhouse-local-lib PRIVATE clickhouse_common_io clickhouse-server-lib clickhouse_functions clickhouse_aggregate_functions clickhouse_table_functions ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
set(CLICKHOUSE_LOCAL_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/LocalServer.cpp)
|
||||
set(CLICKHOUSE_LOCAL_LINK PRIVATE clickhouse_dictionaries clickhouse_common_io clickhouse_functions clickhouse_aggregate_functions clickhouse_table_functions ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
#set(CLICKHOUSE_LOCAL_INCLUDE SYSTEM PRIVATE ...)
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-local clickhouse-local.cpp)
|
||||
target_link_libraries (clickhouse-local PRIVATE clickhouse-local-lib)
|
||||
install (TARGETS clickhouse-local ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
clickhouse_program_add(local)
|
||||
|
||||
if(NOT CLICKHOUSE_ONE_SHARED)
|
||||
target_link_libraries(clickhouse-local-lib PRIVATE clickhouse-server-lib)
|
||||
endif ()
|
||||
|
@ -1,3 +1,4 @@
|
||||
#include <new>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
#include <string>
|
||||
@ -17,12 +18,6 @@
|
||||
#include <gperftools/malloc_extension.h> // Y_IGNORE
|
||||
#endif
|
||||
|
||||
#if ENABLE_CLICKHOUSE_SERVER
|
||||
#include "server/Server.h"
|
||||
#endif
|
||||
#if ENABLE_CLICKHOUSE_LOCAL
|
||||
#include "local/LocalServer.h"
|
||||
#endif
|
||||
#include <Common/StringUtils/StringUtils.h>
|
||||
|
||||
/// Universal executable for various clickhouse applications
|
||||
@ -38,7 +33,7 @@ int mainEntryClickHouseLocal(int argc, char ** argv);
|
||||
#if ENABLE_CLICKHOUSE_BENCHMARK || !defined(ENABLE_CLICKHOUSE_BENCHMARK)
|
||||
int mainEntryClickHouseBenchmark(int argc, char ** argv);
|
||||
#endif
|
||||
#if ENABLE_CLICKHOUSE_PERFORMANCE || !defined(ENABLE_CLICKHOUSE_PERFORMANCE)
|
||||
#if ENABLE_CLICKHOUSE_PERFORMANCE_TEST || !defined(ENABLE_CLICKHOUSE_PERFORMANCE_TEST)
|
||||
int mainEntryClickHousePerformanceTest(int argc, char ** argv);
|
||||
#endif
|
||||
#if ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG || !defined(ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG)
|
||||
@ -84,7 +79,7 @@ std::pair<const char *, MainFunc> clickhouse_applications[] =
|
||||
#if ENABLE_CLICKHOUSE_SERVER || !defined(ENABLE_CLICKHOUSE_SERVER)
|
||||
{"server", mainEntryClickHouseServer},
|
||||
#endif
|
||||
#if ENABLE_CLICKHOUSE_PERFORMANCE || !defined(ENABLE_CLICKHOUSE_PERFORMANCE)
|
||||
#if ENABLE_CLICKHOUSE_PERFORMANCE_TEST || !defined(ENABLE_CLICKHOUSE_PERFORMANCE_TEST)
|
||||
{"performance-test", mainEntryClickHousePerformanceTest},
|
||||
#endif
|
||||
#if ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG || !defined(ENABLE_CLICKHOUSE_EXTRACT_FROM_CONFIG)
|
||||
@ -145,6 +140,10 @@ bool isClickhouseApp(const std::string & app_suffix, std::vector<char *> & argv)
|
||||
|
||||
int main(int argc_, char ** argv_)
|
||||
{
|
||||
/// Reset new handler to default (that throws std::bad_alloc)
|
||||
/// It is needed because LLVM library clobbers it.
|
||||
std::set_new_handler(nullptr);
|
||||
|
||||
#if USE_EMBEDDED_COMPILER
|
||||
if (argc_ >= 2 && 0 == strcmp(argv_[1], "-cc1"))
|
||||
return mainEntryClickHouseClang(argc_, argv_);
|
||||
|
@ -1,9 +1,5 @@
|
||||
add_library (clickhouse-obfuscator-lib ${LINK_MODE} Obfuscator.cpp)
|
||||
target_link_libraries (clickhouse-obfuscator-lib PRIVATE dbms ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
set(CLICKHOUSE_OBFUSCATOR_SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/Obfuscator.cpp)
|
||||
set(CLICKHOUSE_OBFUSCATOR_LINK PRIVATE dbms ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
#set(CLICKHOUSE_OBFUSCATOR_INCLUDE SYSTEM PRIVATE ...)
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-obfuscator clickhouse-obfuscator.cpp)
|
||||
set_target_properties(clickhouse-obfuscator PROPERTIES RUNTIME_OUTPUT_DIRECTORY ..)
|
||||
target_link_libraries (clickhouse-obfuscator PRIVATE clickhouse-obfuscator-lib)
|
||||
install (TARGETS clickhouse-obfuscator ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
clickhouse_program_add(obfuscator)
|
||||
|
@ -577,7 +577,7 @@ public:
|
||||
{
|
||||
for (auto & elem : table)
|
||||
{
|
||||
Histogram & histogram = elem.second;
|
||||
Histogram & histogram = elem.getSecond();
|
||||
|
||||
if (histogram.buckets.size() < params.num_buckets_cutoff)
|
||||
{
|
||||
@ -591,7 +591,7 @@ public:
|
||||
{
|
||||
for (auto & elem : table)
|
||||
{
|
||||
Histogram & histogram = elem.second;
|
||||
Histogram & histogram = elem.getSecond();
|
||||
if (!histogram.total)
|
||||
continue;
|
||||
|
||||
@ -623,7 +623,7 @@ public:
|
||||
{
|
||||
for (auto & elem : table)
|
||||
{
|
||||
Histogram & histogram = elem.second;
|
||||
Histogram & histogram = elem.getSecond();
|
||||
if (!histogram.total)
|
||||
continue;
|
||||
|
||||
@ -639,7 +639,7 @@ public:
|
||||
{
|
||||
for (auto & elem : table)
|
||||
{
|
||||
Histogram & histogram = elem.second;
|
||||
Histogram & histogram = elem.getSecond();
|
||||
if (!histogram.total)
|
||||
continue;
|
||||
|
||||
@ -674,7 +674,7 @@ public:
|
||||
while (true)
|
||||
{
|
||||
it = table.find(hashContext(code_points.data() + code_points.size() - context_size, code_points.data() + code_points.size()));
|
||||
if (table.end() != it && it->second.total + it->second.count_end != 0)
|
||||
if (table.end() != it && it->getSecond().total + it->getSecond().count_end != 0)
|
||||
break;
|
||||
|
||||
if (context_size == 0)
|
||||
@ -708,7 +708,7 @@ public:
|
||||
if (num_bytes_after_desired_size > 0)
|
||||
end_probability_multiplier = std::pow(1.25, num_bytes_after_desired_size);
|
||||
|
||||
CodePoint code = it->second.sample(determinator, end_probability_multiplier);
|
||||
CodePoint code = it->getSecond().sample(determinator, end_probability_multiplier);
|
||||
|
||||
if (code == END)
|
||||
break;
|
||||
|
@ -1,37 +1,36 @@
|
||||
add_library (clickhouse-odbc-bridge-lib ${LINK_MODE}
|
||||
PingHandler.cpp
|
||||
MainHandler.cpp
|
||||
ColumnInfoHandler.cpp
|
||||
IdentifierQuoteHandler.cpp
|
||||
HandlerFactory.cpp
|
||||
ODBCBridge.cpp
|
||||
getIdentifierQuote.cpp
|
||||
validateODBCConnectionString.cpp
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_SOURCES
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/ColumnInfoHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/getIdentifierQuote.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/HandlerFactory.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/IdentifierQuoteHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/MainHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/ODBCBlockInputStream.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/odbc-bridge.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/ODBCBridge.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/PingHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/validateODBCConnectionString.cpp
|
||||
)
|
||||
|
||||
target_link_libraries (clickhouse-odbc-bridge-lib PRIVATE daemon dbms clickhouse_common_io)
|
||||
target_include_directories (clickhouse-odbc-bridge-lib PUBLIC ${ClickHouse_SOURCE_DIR}/libs/libdaemon/include)
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_LINK PRIVATE dbms clickhouse_common_io PUBLIC daemon)
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_INCLUDE PUBLIC ${ClickHouse_SOURCE_DIR}/libs/libdaemon/include)
|
||||
|
||||
if (USE_POCO_SQLODBC)
|
||||
target_link_libraries (clickhouse-odbc-bridge-lib PRIVATE ${Poco_SQLODBC_LIBRARY})
|
||||
target_include_directories (clickhouse-odbc-bridge-lib SYSTEM PRIVATE ${ODBC_INCLUDE_DIRECTORIES} ${Poco_SQLODBC_INCLUDE_DIR})
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_LINK ${CLICKHOUSE_ODBC_BRIDGE_LINK} PRIVATE ${Poco_SQLODBC_LIBRARY})
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_INCLUDE ${CLICKHOUSE_ODBC_BRIDGE_INCLUDE} SYSTEM PRIVATE ${ODBC_INCLUDE_DIRECTORIES} ${Poco_SQLODBC_INCLUDE_DIR})
|
||||
endif ()
|
||||
if (Poco_SQL_FOUND)
|
||||
target_link_libraries (clickhouse-odbc-bridge-lib PRIVATE ${Poco_SQL_LIBRARY})
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_LINK ${CLICKHOUSE_ODBC_BRIDGE_LINK} PRIVATE ${Poco_SQL_LIBRARY})
|
||||
endif ()
|
||||
|
||||
if (USE_POCO_DATAODBC)
|
||||
target_link_libraries (clickhouse-odbc-bridge-lib PRIVATE ${Poco_DataODBC_LIBRARY})
|
||||
target_include_directories (clickhouse-odbc-bridge-lib SYSTEM PRIVATE ${ODBC_INCLUDE_DIRECTORIES} ${Poco_DataODBC_INCLUDE_DIR})
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_LINK ${CLICKHOUSE_ODBC_BRIDGE_LINK} PRIVATE ${Poco_DataODBC_LIBRARY})
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_INCLUDE ${CLICKHOUSE_ODBC_BRIDGE_INCLUDE} SYSTEM PRIVATE ${ODBC_INCLUDE_DIRECTORIES} ${Poco_DataODBC_INCLUDE_DIR})
|
||||
endif()
|
||||
if (Poco_Data_FOUND)
|
||||
target_link_libraries (clickhouse-odbc-bridge-lib PRIVATE ${Poco_Data_LIBRARY})
|
||||
set(CLICKHOUSE_ODBC_BRIDGE_LINK ${CLICKHOUSE_ODBC_BRIDGE_LINK} PRIVATE ${Poco_Data_LIBRARY})
|
||||
endif ()
|
||||
|
||||
|
||||
if (ENABLE_TESTS)
|
||||
add_subdirectory (tests)
|
||||
endif ()
|
||||
clickhouse_program_add_library(odbc-bridge)
|
||||
|
||||
# clickhouse-odbc-bridge is always a separate binary.
|
||||
# Reason: it must not export symbols from SSL, mariadb-client, etc. to not break ABI compatibility with ODBC drivers.
|
||||
@ -39,5 +38,11 @@ endif ()
|
||||
SET(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")
|
||||
|
||||
add_executable (clickhouse-odbc-bridge odbc-bridge.cpp)
|
||||
target_link_libraries (clickhouse-odbc-bridge PRIVATE clickhouse-odbc-bridge-lib)
|
||||
|
||||
clickhouse_program_link_split_binary(odbc-bridge)
|
||||
|
||||
install (TARGETS clickhouse-odbc-bridge RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
|
||||
if (ENABLE_TESTS)
|
||||
add_subdirectory (tests)
|
||||
endif ()
|
||||
|
@ -4,7 +4,7 @@
|
||||
#include <memory>
|
||||
#include <DataStreams/copyData.h>
|
||||
#include <DataTypes/DataTypeFactory.h>
|
||||
#include <Dictionaries/ODBCBlockInputStream.h>
|
||||
#include "ODBCBlockInputStream.h"
|
||||
#include <Formats/BinaryRowInputStream.h>
|
||||
#include <Formats/FormatFactory.h>
|
||||
#include <IO/WriteBufferFromHTTPServerResponse.h>
|
||||
@ -37,7 +37,8 @@ ODBCHandler::PoolPtr ODBCHandler::getPool(const std::string & connection_str)
|
||||
std::lock_guard lock(mutex);
|
||||
if (!pool_map->count(connection_str))
|
||||
{
|
||||
pool_map->emplace(connection_str, createAndCheckResizePocoSessionPool([connection_str] {
|
||||
pool_map->emplace(connection_str, createAndCheckResizePocoSessionPool([connection_str]
|
||||
{
|
||||
return std::make_shared<Poco::Data::SessionPool>("ODBC", validateODBCConnectionString(connection_str));
|
||||
}));
|
||||
}
|
||||
|
@ -6,7 +6,7 @@
|
||||
#include <Poco/Data/RecordSet.h>
|
||||
#include <Poco/Data/Session.h>
|
||||
#include <Poco/Data/Statement.h>
|
||||
#include "ExternalResultDescription.h"
|
||||
#include <Core/ExternalResultDescription.h>
|
||||
|
||||
|
||||
namespace DB
|
@ -1,2 +1,3 @@
|
||||
add_executable (validate-odbc-connection-string validate-odbc-connection-string.cpp)
|
||||
target_link_libraries (validate-odbc-connection-string PRIVATE clickhouse-odbc-bridge-lib clickhouse_common_io)
|
||||
clickhouse_target_link_split_lib(validate-odbc-connection-string odbc-bridge)
|
||||
target_link_libraries (validate-odbc-connection-string PRIVATE clickhouse_common_io)
|
||||
|
@ -1,21 +1,18 @@
|
||||
add_library (clickhouse-performance-test-lib ${LINK_MODE}
|
||||
JSONString.cpp
|
||||
StopConditionsSet.cpp
|
||||
TestStopConditions.cpp
|
||||
TestStats.cpp
|
||||
ConfigPreprocessor.cpp
|
||||
PerformanceTest.cpp
|
||||
PerformanceTestInfo.cpp
|
||||
executeQuery.cpp
|
||||
applySubstitutions.cpp
|
||||
ReportBuilder.cpp
|
||||
PerformanceTestSuite.cpp
|
||||
)
|
||||
target_link_libraries (clickhouse-performance-test-lib PRIVATE dbms clickhouse_common_io clickhouse_common_config ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
target_include_directories (clickhouse-performance-test-lib SYSTEM PRIVATE ${PCG_RANDOM_INCLUDE_DIR})
|
||||
set(CLICKHOUSE_PERFORMANCE_TEST_SOURCES
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/JSONString.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/StopConditionsSet.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/TestStopConditions.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/TestStats.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/ConfigPreprocessor.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/PerformanceTest.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/PerformanceTestInfo.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/executeQuery.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/applySubstitutions.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/ReportBuilder.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/PerformanceTestSuite.cpp
|
||||
)
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-performance-test clickhouse-performance-test.cpp)
|
||||
target_link_libraries (clickhouse-performance-test PRIVATE clickhouse-performance-test-lib)
|
||||
install (TARGETS clickhouse-performance-test ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
set(CLICKHOUSE_PERFORMANCE_TEST_LINK PRIVATE dbms clickhouse_common_io clickhouse_common_config ${Boost_PROGRAM_OPTIONS_LIBRARY})
|
||||
set(CLICKHOUSE_PERFORMANCE_TEST_INCLUDE SYSTEM PRIVATE ${PCG_RANDOM_INCLUDE_DIR})
|
||||
|
||||
clickhouse_program_add(performance-test)
|
||||
|
@ -43,8 +43,10 @@ namespace fs = boost::filesystem;
|
||||
|
||||
PerformanceTestInfo::PerformanceTestInfo(
|
||||
XMLConfigurationPtr config,
|
||||
const std::string & profiles_file_)
|
||||
const std::string & profiles_file_,
|
||||
const Settings & global_settings_)
|
||||
: profiles_file(profiles_file_)
|
||||
, settings(global_settings_)
|
||||
{
|
||||
test_name = config->getString("name");
|
||||
path = config->getString("path");
|
||||
|
@ -26,7 +26,7 @@ using StringToVector = std::map<std::string, Strings>;
|
||||
class PerformanceTestInfo
|
||||
{
|
||||
public:
|
||||
PerformanceTestInfo(XMLConfigurationPtr config, const std::string & profiles_file_);
|
||||
PerformanceTestInfo(XMLConfigurationPtr config, const std::string & profiles_file_, const Settings & global_settings_);
|
||||
|
||||
std::string test_name;
|
||||
std::string path;
|
||||
@ -34,12 +34,12 @@ public:
|
||||
|
||||
Strings queries;
|
||||
|
||||
std::string profiles_file;
|
||||
Settings settings;
|
||||
ExecutionType exec_type;
|
||||
StringToVector substitutions;
|
||||
size_t times_to_run;
|
||||
|
||||
std::string profiles_file;
|
||||
std::vector<TestStopConditions> stop_conditions_by_run;
|
||||
|
||||
Strings create_queries;
|
||||
|
@ -91,16 +91,6 @@ public:
|
||||
throw Exception("No tests were specified", ErrorCodes::BAD_ARGUMENTS);
|
||||
}
|
||||
|
||||
/// This functionality seems strange.
|
||||
//void initialize(Poco::Util::Application & self [[maybe_unused]])
|
||||
//{
|
||||
// std::string home_path;
|
||||
// const char * home_path_cstr = getenv("HOME");
|
||||
// if (home_path_cstr)
|
||||
// home_path = home_path_cstr;
|
||||
// configReadClient(Poco::Util::Application::instance().config(), home_path);
|
||||
//}
|
||||
|
||||
int run()
|
||||
{
|
||||
std::string name;
|
||||
@ -120,6 +110,10 @@ public:
|
||||
|
||||
return 0;
|
||||
}
|
||||
void setContextSetting(const String & name, const std::string & value)
|
||||
{
|
||||
global_context.setSetting(name, value);
|
||||
}
|
||||
|
||||
private:
|
||||
Connection connection;
|
||||
@ -201,7 +195,7 @@ private:
|
||||
|
||||
std::pair<std::string, bool> runTest(XMLConfigurationPtr & test_config)
|
||||
{
|
||||
PerformanceTestInfo info(test_config, profiles_file);
|
||||
PerformanceTestInfo info(test_config, profiles_file, global_context.getSettingsRef());
|
||||
LOG_INFO(log, "Config for test '" << info.test_name << "' parsed");
|
||||
PerformanceTest current(test_config, connection, interrupt_listener, info, global_context, query_indexes[info.path]);
|
||||
|
||||
@ -330,6 +324,7 @@ try
|
||||
using Strings = DB::Strings;
|
||||
|
||||
|
||||
#define DECLARE_SETTING(TYPE, NAME, DEFAULT, DESCRIPTION) (#NAME, po::value<std::string>(), DESCRIPTION)
|
||||
po::options_description desc("Allowed options");
|
||||
desc.add_options()
|
||||
("help", "produce help message")
|
||||
@ -350,7 +345,10 @@ try
|
||||
("skip-names-regexp", value<Strings>()->multitoken(), "Do not run tests with names matching regexp")
|
||||
("input-files", value<Strings>()->multitoken(), "Input .xml files")
|
||||
("query-indexes", value<std::vector<size_t>>()->multitoken(), "Input query indexes")
|
||||
("recursive,r", "Recurse in directories to find all xml's");
|
||||
("recursive,r", "Recurse in directories to find all xml's")
|
||||
APPLY_FOR_SETTINGS(DECLARE_SETTING);
|
||||
#undef DECLARE_SETTING
|
||||
|
||||
|
||||
po::options_description cmdline_options;
|
||||
cmdline_options.add(desc);
|
||||
@ -408,6 +406,15 @@ try
|
||||
std::move(skip_names_regexp),
|
||||
queries_with_indexes,
|
||||
timeouts);
|
||||
/// Extract settings from the options.
|
||||
#define EXTRACT_SETTING(TYPE, NAME, DEFAULT, DESCRIPTION) \
|
||||
if (options.count(#NAME)) \
|
||||
{ \
|
||||
performance_test_suite.setContextSetting(#NAME, options[#NAME].as<std::string>()); \
|
||||
}
|
||||
APPLY_FOR_SETTINGS(EXTRACT_SETTING)
|
||||
#undef EXTRACT_SETTING
|
||||
|
||||
return performance_test_suite.run();
|
||||
}
|
||||
catch (...)
|
||||
|
@ -1,27 +1,22 @@
|
||||
add_library (clickhouse-server-lib ${LINK_MODE}
|
||||
HTTPHandler.cpp
|
||||
InterserverIOHTTPHandler.cpp
|
||||
MetricsTransmitter.cpp
|
||||
NotFoundHandler.cpp
|
||||
PingRequestHandler.cpp
|
||||
ReplicasStatusHandler.cpp
|
||||
RootRequestHandler.cpp
|
||||
Server.cpp
|
||||
TCPHandler.cpp
|
||||
)
|
||||
set(CLICKHOUSE_SERVER_SOURCES
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/HTTPHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/InterserverIOHTTPHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/MetricsTransmitter.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/NotFoundHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/PingRequestHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/ReplicasStatusHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/RootRequestHandler.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/Server.cpp
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/TCPHandler.cpp
|
||||
)
|
||||
|
||||
target_link_libraries (clickhouse-server-lib PRIVATE clickhouse_common_io daemon clickhouse_storages_system clickhouse_functions clickhouse_aggregate_functions clickhouse_table_functions ${Poco_Net_LIBRARY})
|
||||
set(CLICKHOUSE_SERVER_LINK PRIVATE clickhouse_dictionaries clickhouse_common_io PUBLIC daemon PRIVATE clickhouse_storages_system clickhouse_functions clickhouse_aggregate_functions clickhouse_table_functions ${Poco_Net_LIBRARY})
|
||||
if (USE_POCO_NETSSL)
|
||||
target_link_libraries (clickhouse-server-lib PRIVATE ${Poco_NetSSL_LIBRARY} ${Poco_Crypto_LIBRARY})
|
||||
set(CLICKHOUSE_SERVER_LINK ${CLICKHOUSE_SERVER_LINK} PRIVATE ${Poco_NetSSL_LIBRARY} ${Poco_Crypto_LIBRARY})
|
||||
endif ()
|
||||
set(CLICKHOUSE_SERVER_INCLUDE PUBLIC ${ClickHouse_SOURCE_DIR}/libs/libdaemon/include)
|
||||
|
||||
target_include_directories (clickhouse-server-lib PUBLIC ${ClickHouse_SOURCE_DIR}/libs/libdaemon/include)
|
||||
|
||||
if (CLICKHOUSE_SPLIT_BINARY)
|
||||
add_executable (clickhouse-server clickhouse-server.cpp)
|
||||
target_link_libraries (clickhouse-server PRIVATE clickhouse-server-lib)
|
||||
install (TARGETS clickhouse-server ${CLICKHOUSE_ALL_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse)
|
||||
endif ()
|
||||
clickhouse_program_add(server)
|
||||
|
||||
if (GLIBC_COMPATIBILITY)
|
||||
set (GLIBC_MAX_REQUIRED 2.4 CACHE INTERNAL "")
|
||||
@ -31,7 +26,4 @@ if (GLIBC_COMPATIBILITY)
|
||||
#add_test(NAME GLIBC_required_version COMMAND bash -c "readelf -s ${CMAKE_CURRENT_BINARY_DIR}/../clickhouse-server | grep '@GLIBC' | grep -oP 'GLIBC_[\\d\\.]+' | sort | uniq | sort --version-sort --reverse | perl -lnE 'warn($_), exit 1 if $_ gt q{GLIBC_${GLIBC_MAX_REQUIRED}}'") # old
|
||||
endif ()
|
||||
|
||||
install (
|
||||
FILES config.xml users.xml
|
||||
DESTINATION ${CLICKHOUSE_ETC_DIR}/clickhouse-server
|
||||
COMPONENT clickhouse)
|
||||
install(FILES config.xml users.xml DESTINATION ${CLICKHOUSE_ETC_DIR}/clickhouse-server COMPONENT clickhouse)
|
||||
|
@ -1,25 +1,26 @@
|
||||
#include "HTTPHandler.h"
|
||||
|
||||
#include <chrono>
|
||||
#include <iomanip>
|
||||
|
||||
#include <Poco/File.h>
|
||||
#include <Poco/Net/HTTPBasicCredentials.h>
|
||||
#include <Poco/Net/HTTPServerRequest.h>
|
||||
#include <Poco/Net/HTTPServerRequestImpl.h>
|
||||
#include <Poco/Net/HTTPServerResponse.h>
|
||||
#include <Poco/Net/NetException.h>
|
||||
|
||||
#include <ext/scope_guard.h>
|
||||
|
||||
#include <Core/ExternalTable.h>
|
||||
#include <Common/StringUtils/StringUtils.h>
|
||||
#include <Common/escapeForFileName.h>
|
||||
#include <Common/getFQDNOrHostName.h>
|
||||
#include <Common/CurrentThread.h>
|
||||
#include <Common/setThreadName.h>
|
||||
#include <Common/config.h>
|
||||
#include <Compression/CompressedReadBuffer.h>
|
||||
#include <Compression/CompressedWriteBuffer.h>
|
||||
#include <IO/ReadBufferFromIStream.h>
|
||||
#include <IO/ZlibInflatingReadBuffer.h>
|
||||
#include <IO/BrotliReadBuffer.h>
|
||||
#include <IO/ReadBufferFromString.h>
|
||||
#include <IO/WriteBufferFromString.h>
|
||||
#include <IO/WriteBufferFromHTTPServerResponse.h>
|
||||
@ -30,17 +31,12 @@
|
||||
#include <IO/CascadeWriteBuffer.h>
|
||||
#include <IO/MemoryReadWriteBuffer.h>
|
||||
#include <IO/WriteBufferFromTemporaryFile.h>
|
||||
|
||||
#include <DataStreams/IBlockInputStream.h>
|
||||
|
||||
#include <Interpreters/executeQuery.h>
|
||||
#include <Interpreters/Quota.h>
|
||||
#include <Common/typeid_cast.h>
|
||||
|
||||
#include <Poco/Net/HTTPStream.h>
|
||||
|
||||
#include "HTTPHandler.h"
|
||||
|
||||
namespace DB
|
||||
{
|
||||
|
||||
@ -396,19 +392,25 @@ void HTTPHandler::processQuery(
|
||||
String http_request_compression_method_str = request.get("Content-Encoding", "");
|
||||
if (!http_request_compression_method_str.empty())
|
||||
{
|
||||
ZlibCompressionMethod method;
|
||||
if (http_request_compression_method_str == "gzip")
|
||||
{
|
||||
method = ZlibCompressionMethod::Gzip;
|
||||
in_post = std::make_unique<ZlibInflatingReadBuffer>(*in_post_raw, ZlibCompressionMethod::Gzip);
|
||||
}
|
||||
else if (http_request_compression_method_str == "deflate")
|
||||
{
|
||||
method = ZlibCompressionMethod::Zlib;
|
||||
in_post = std::make_unique<ZlibInflatingReadBuffer>(*in_post_raw, ZlibCompressionMethod::Zlib);
|
||||
}
|
||||
#if USE_BROTLI
|
||||
else if (http_request_compression_method_str == "br")
|
||||
{
|
||||
in_post = std::make_unique<BrotliReadBuffer>(*in_post_raw);
|
||||
}
|
||||
#endif
|
||||
else
|
||||
{
|
||||
throw Exception("Unknown Content-Encoding of HTTP request: " + http_request_compression_method_str,
|
||||
ErrorCodes::UNKNOWN_COMPRESSION_METHOD);
|
||||
in_post = std::make_unique<ZlibInflatingReadBuffer>(*in_post_raw, method);
|
||||
ErrorCodes::UNKNOWN_COMPRESSION_METHOD);
|
||||
}
|
||||
}
|
||||
else
|
||||
in_post = std::move(in_post_raw);
|
||||
@ -600,6 +602,8 @@ void HTTPHandler::processQuery(
|
||||
});
|
||||
}
|
||||
|
||||
customizeContext(context);
|
||||
|
||||
executeQuery(*in, *used_output.out_maybe_delayed_and_compressed, /* allow_into_outfile = */ false, context,
|
||||
[&response] (const String & content_type) { response.setContentType(content_type); },
|
||||
[&response] (const String & current_query_id) { response.add("Query-Id", current_query_id); });
|
||||
|
@ -28,6 +28,9 @@ public:
|
||||
|
||||
void handleRequest(Poco::Net::HTTPServerRequest & request, Poco::Net::HTTPServerResponse & response) override;
|
||||
|
||||
/// This method is called right before the query execution.
|
||||
virtual void customizeContext(DB::Context& /* context */) {}
|
||||
|
||||
private:
|
||||
struct Output
|
||||
{
|
||||
|
@ -260,6 +260,15 @@ int Server::main(const std::vector<std::string> & /*args*/)
|
||||
StatusFile status{path + "status"};
|
||||
|
||||
SCOPE_EXIT({
|
||||
/** Ask to cancel background jobs all table engines,
|
||||
* and also query_log.
|
||||
* It is important to do early, not in destructor of Context, because
|
||||
* table engines could use Context on destroy.
|
||||
*/
|
||||
LOG_INFO(log, "Shutting down storages.");
|
||||
global_context->shutdown();
|
||||
LOG_DEBUG(log, "Shutted down storages.");
|
||||
|
||||
/** Explicitly destroy Context. It is more convenient than in destructor of Server, because logger is still available.
|
||||
* At this moment, no one could own shared part of Context.
|
||||
*/
|
||||
@ -478,7 +487,7 @@ int Server::main(const std::vector<std::string> & /*args*/)
|
||||
global_context->setFormatSchemaPath(format_schema_path.path());
|
||||
format_schema_path.createDirectories();
|
||||
|
||||
LOG_INFO(log, "Loading metadata.");
|
||||
LOG_INFO(log, "Loading metadata from " + path);
|
||||
try
|
||||
{
|
||||
loadMetadataSystem(*global_context);
|
||||
@ -498,22 +507,11 @@ int Server::main(const std::vector<std::string> & /*args*/)
|
||||
|
||||
global_context->setCurrentDatabase(default_database);
|
||||
|
||||
SCOPE_EXIT({
|
||||
/** Ask to cancel background jobs all table engines,
|
||||
* and also query_log.
|
||||
* It is important to do early, not in destructor of Context, because
|
||||
* table engines could use Context on destroy.
|
||||
*/
|
||||
LOG_INFO(log, "Shutting down storages.");
|
||||
global_context->shutdown();
|
||||
LOG_DEBUG(log, "Shutted down storages.");
|
||||
});
|
||||
|
||||
if (has_zookeeper && config().has("distributed_ddl"))
|
||||
{
|
||||
/// DDL worker should be started after all tables were loaded
|
||||
String ddl_zookeeper_path = config().getString("distributed_ddl.path", "/clickhouse/task_queue/ddl/");
|
||||
global_context->setDDLWorker(std::make_shared<DDLWorker>(ddl_zookeeper_path, *global_context, &config(), "distributed_ddl"));
|
||||
global_context->setDDLWorker(std::make_unique<DDLWorker>(ddl_zookeeper_path, *global_context, &config(), "distributed_ddl"));
|
||||
}
|
||||
|
||||
std::unique_ptr<DNSCacheUpdater> dns_cache_updater;
|
||||
@ -592,7 +590,7 @@ int Server::main(const std::vector<std::string> & /*args*/)
|
||||
auto socket_bind_listen = [&](auto & socket, const std::string & host, UInt16 port, bool secure = 0)
|
||||
{
|
||||
auto address = make_socket_address(host, port);
|
||||
#if !POCO_CLICKHOUSE_PATCH || POCO_VERSION <= 0x02000000 // TODO: fill correct version
|
||||
#if !defined(POCO_CLICKHOUSE_PATCH) || POCO_VERSION <= 0x02000000 // TODO: fill correct version
|
||||
if (secure)
|
||||
/// Bug in old poco, listen() after bind() with reusePort param will fail because have no implementation in SecureServerSocketImpl
|
||||
/// https://github.com/pocoproject/poco/pull/2257
|
||||
|
@ -122,7 +122,7 @@ void TCPHandler::runImpl()
|
||||
|
||||
while (1)
|
||||
{
|
||||
/// Restore context of request.
|
||||
/// Set context of request.
|
||||
query_context = connection_context;
|
||||
|
||||
/// We are waiting for a packet from the client. Thus, every `POLL_INTERVAL` seconds check whether we need to shut down.
|
||||
@ -158,22 +158,22 @@ void TCPHandler::runImpl()
|
||||
if (!receivePacket())
|
||||
continue;
|
||||
|
||||
query_scope.emplace(query_context);
|
||||
query_scope.emplace(*query_context);
|
||||
|
||||
send_exception_with_stack_trace = query_context.getSettingsRef().calculate_text_stack_trace;
|
||||
send_exception_with_stack_trace = query_context->getSettingsRef().calculate_text_stack_trace;
|
||||
|
||||
/// Should we send internal logs to client?
|
||||
if (client_revision >= DBMS_MIN_REVISION_WITH_SERVER_LOGS
|
||||
&& query_context.getSettingsRef().send_logs_level.value != "none")
|
||||
&& query_context->getSettingsRef().send_logs_level.value != LogsLevel::none)
|
||||
{
|
||||
state.logs_queue = std::make_shared<InternalTextLogsQueue>();
|
||||
state.logs_queue->max_priority = Poco::Logger::parseLevel(query_context.getSettingsRef().send_logs_level.value);
|
||||
state.logs_queue->max_priority = Poco::Logger::parseLevel(query_context->getSettingsRef().send_logs_level.toString());
|
||||
CurrentThread::attachInternalTextLogsQueue(state.logs_queue);
|
||||
}
|
||||
|
||||
query_context.setExternalTablesInitializer([&global_settings, this] (Context & context)
|
||||
query_context->setExternalTablesInitializer([&global_settings, this] (Context & context)
|
||||
{
|
||||
if (&context != &query_context)
|
||||
if (&context != &*query_context)
|
||||
throw Exception("Unexpected context in external tables initializer", ErrorCodes::LOGICAL_ERROR);
|
||||
|
||||
/// Get blocks of temporary tables
|
||||
@ -185,8 +185,11 @@ void TCPHandler::runImpl()
|
||||
state.maybe_compressed_in.reset(); /// For more accurate accounting by MemoryTracker.
|
||||
});
|
||||
|
||||
customizeContext(*query_context);
|
||||
|
||||
bool may_have_embedded_data = client_revision >= DBMS_MIN_REVISION_WITH_CLIENT_SUPPORT_EMBEDDED_DATA;
|
||||
/// Processing Query
|
||||
state.io = executeQuery(state.query, query_context, false, state.stage);
|
||||
state.io = executeQuery(state.query, *query_context, false, state.stage, may_have_embedded_data);
|
||||
|
||||
if (state.io.out)
|
||||
state.need_receive_data_for_insert = true;
|
||||
@ -292,6 +295,9 @@ void TCPHandler::runImpl()
|
||||
LOG_INFO(log, std::fixed << std::setprecision(3)
|
||||
<< "Processed in " << watch.elapsedSeconds() << " sec.");
|
||||
|
||||
/// It is important to destroy query context here. We do not want it to live arbitrarily longer than the query.
|
||||
query_context.reset();
|
||||
|
||||
if (network_error)
|
||||
break;
|
||||
}
|
||||
@ -300,7 +306,7 @@ void TCPHandler::runImpl()
|
||||
|
||||
void TCPHandler::readData(const Settings & global_settings)
|
||||
{
|
||||
const auto receive_timeout = query_context.getSettingsRef().receive_timeout.value;
|
||||
const auto receive_timeout = query_context->getSettingsRef().receive_timeout.value;
|
||||
|
||||
/// Poll interval should not be greater than receive_timeout
|
||||
const size_t default_poll_interval = global_settings.poll_interval.value * 1000000;
|
||||
@ -363,8 +369,8 @@ void TCPHandler::processInsertQuery(const Settings & global_settings)
|
||||
/// Send ColumnsDescription for insertion table
|
||||
if (client_revision >= DBMS_MIN_REVISION_WITH_COLUMN_DEFAULTS_METADATA)
|
||||
{
|
||||
const auto & db_and_table = query_context.getInsertionTable();
|
||||
if (auto * columns = ColumnsDescription::loadFromContext(query_context, db_and_table.first, db_and_table.second))
|
||||
const auto & db_and_table = query_context->getInsertionTable();
|
||||
if (auto * columns = ColumnsDescription::loadFromContext(*query_context, db_and_table.first, db_and_table.second))
|
||||
sendTableColumns(*columns);
|
||||
}
|
||||
|
||||
@ -407,7 +413,7 @@ void TCPHandler::processOrdinaryQuery()
|
||||
}
|
||||
else
|
||||
{
|
||||
if (after_send_progress.elapsed() / 1000 >= query_context.getSettingsRef().interactive_delay)
|
||||
if (after_send_progress.elapsed() / 1000 >= query_context->getSettingsRef().interactive_delay)
|
||||
{
|
||||
/// Some time passed and there is a progress.
|
||||
after_send_progress.restart();
|
||||
@ -416,7 +422,7 @@ void TCPHandler::processOrdinaryQuery()
|
||||
|
||||
sendLogs();
|
||||
|
||||
if (async_in.poll(query_context.getSettingsRef().interactive_delay / 1000))
|
||||
if (async_in.poll(query_context->getSettingsRef().interactive_delay / 1000))
|
||||
{
|
||||
/// There is the following result block.
|
||||
block = async_in.read();
|
||||
@ -644,11 +650,11 @@ void TCPHandler::receiveQuery()
|
||||
state.is_empty = false;
|
||||
readStringBinary(state.query_id, *in);
|
||||
|
||||
query_context.setCurrentQueryId(state.query_id);
|
||||
query_context->setCurrentQueryId(state.query_id);
|
||||
|
||||
/// Client info
|
||||
{
|
||||
ClientInfo & client_info = query_context.getClientInfo();
|
||||
ClientInfo & client_info = query_context->getClientInfo();
|
||||
if (client_revision >= DBMS_MIN_REVISION_WITH_CLIENT_INFO)
|
||||
client_info.read(*in, client_revision);
|
||||
|
||||
@ -676,7 +682,7 @@ void TCPHandler::receiveQuery()
|
||||
}
|
||||
|
||||
/// Per query settings.
|
||||
Settings & settings = query_context.getSettingsRef();
|
||||
Settings & settings = query_context->getSettingsRef();
|
||||
settings.deserialize(*in);
|
||||
|
||||
/// Sync timeouts on client and server during current query to avoid dangling queries on server
|
||||
@ -714,16 +720,16 @@ bool TCPHandler::receiveData()
|
||||
{
|
||||
StoragePtr storage;
|
||||
/// If such a table does not exist, create it.
|
||||
if (!(storage = query_context.tryGetExternalTable(external_table_name)))
|
||||
if (!(storage = query_context->tryGetExternalTable(external_table_name)))
|
||||
{
|
||||
NamesAndTypesList columns = block.getNamesAndTypesList();
|
||||
storage = StorageMemory::create(external_table_name,
|
||||
ColumnsDescription{columns, NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{}, ColumnComments{}, ColumnCodecs{}});
|
||||
storage->startup();
|
||||
query_context.addExternalTable(external_table_name, storage);
|
||||
query_context->addExternalTable(external_table_name, storage);
|
||||
}
|
||||
/// The data will be written directly to the table.
|
||||
state.io.out = storage->write(ASTPtr(), query_context.getSettingsRef());
|
||||
state.io.out = storage->write(ASTPtr(), *query_context);
|
||||
}
|
||||
if (block)
|
||||
state.io.out->write(block);
|
||||
@ -762,10 +768,10 @@ void TCPHandler::initBlockOutput(const Block & block)
|
||||
{
|
||||
if (!state.maybe_compressed_out)
|
||||
{
|
||||
std::string method = query_context.getSettingsRef().network_compression_method;
|
||||
std::string method = query_context->getSettingsRef().network_compression_method;
|
||||
std::optional<int> level;
|
||||
if (method == "ZSTD")
|
||||
level = query_context.getSettingsRef().network_zstd_compression_level;
|
||||
level = query_context->getSettingsRef().network_zstd_compression_level;
|
||||
|
||||
if (state.compression == Protocol::Compression::Enable)
|
||||
state.maybe_compressed_out = std::make_shared<CompressedWriteBuffer>(
|
||||
@ -801,7 +807,7 @@ bool TCPHandler::isQueryCancelled()
|
||||
if (state.is_cancelled || state.sent_all_data)
|
||||
return true;
|
||||
|
||||
if (after_check_cancelled.elapsed() / 1000 < query_context.getSettingsRef().interactive_delay)
|
||||
if (after_check_cancelled.elapsed() / 1000 < query_context->getSettingsRef().interactive_delay)
|
||||
return false;
|
||||
|
||||
after_check_cancelled.restart();
|
||||
|
@ -95,6 +95,9 @@ public:
|
||||
|
||||
void run();
|
||||
|
||||
/// This method is called right before the query execution.
|
||||
virtual void customizeContext(DB::Context & /*context*/) {}
|
||||
|
||||
private:
|
||||
IServer & server;
|
||||
Poco::Logger * log;
|
||||
@ -106,7 +109,7 @@ private:
|
||||
UInt64 client_revision = 0;
|
||||
|
||||
Context connection_context;
|
||||
Context query_context;
|
||||
std::optional<Context> query_context;
|
||||
|
||||
/// Streams for reading/writing from/to client connection socket.
|
||||
std::shared_ptr<ReadBuffer> in;
|
||||
|
@ -1,4 +1,7 @@
|
||||
<?xml version="1.0"?>
|
||||
<!--
|
||||
NOTE: User and query level settings are set up in "users.xml" file.
|
||||
-->
|
||||
<yandex>
|
||||
<logger>
|
||||
<!-- Possible levels: https://github.com/pocoproject/poco/blob/develop/Foundation/include/Poco/Logger.h#L105 -->
|
||||
@ -291,12 +294,20 @@
|
||||
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
|
||||
</query_log>
|
||||
|
||||
<!-- Query thread log. Has information about all threads participated in query execution.
|
||||
Used only for queries with setting log_query_threads = 1. -->
|
||||
<query_thread_log>
|
||||
<database>system</database>
|
||||
<table>query_thread_log</table>
|
||||
<partition_by>toYYYYMM(event_date)</partition_by>
|
||||
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
|
||||
</query_thread_log>
|
||||
|
||||
<!-- Uncomment if use part_log
|
||||
<!-- Uncomment if use part log.
|
||||
Part log contains information about all actions with parts in MergeTree tables (creation, deletion, merges, downloads).
|
||||
<part_log>
|
||||
<database>system</database>
|
||||
<table>part_log</table>
|
||||
|
||||
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
|
||||
</part_log>
|
||||
-->
|
||||
|
@ -4,7 +4,6 @@
|
||||
<default>
|
||||
<networks replace="replace">
|
||||
<ip>::1</ip>
|
||||
<ip>0.0.0.0</ip>
|
||||
<ip>127.0.0.1</ip>
|
||||
</networks>
|
||||
</default>
|
||||
|
@ -31,12 +31,13 @@ template <typename Data>
|
||||
class AggregateFunctionArgMinMax final : public IAggregateFunctionDataHelper<Data, AggregateFunctionArgMinMax<Data>>
|
||||
{
|
||||
private:
|
||||
DataTypePtr type_res;
|
||||
DataTypePtr type_val;
|
||||
const DataTypePtr & type_res;
|
||||
const DataTypePtr & type_val;
|
||||
|
||||
public:
|
||||
AggregateFunctionArgMinMax(const DataTypePtr & type_res, const DataTypePtr & type_val)
|
||||
: type_res(type_res), type_val(type_val)
|
||||
: IAggregateFunctionDataHelper<Data, AggregateFunctionArgMinMax<Data>>({type_res, type_val}, {}),
|
||||
type_res(this->argument_types[0]), type_val(this->argument_types[1])
|
||||
{
|
||||
if (!type_val->isComparable())
|
||||
throw Exception("Illegal type " + type_val->getName() + " of second argument of aggregate function " + getName()
|
||||
|
@ -28,7 +28,8 @@ private:
|
||||
|
||||
public:
|
||||
AggregateFunctionArray(AggregateFunctionPtr nested_, const DataTypes & arguments)
|
||||
: nested_func(nested_), num_arguments(arguments.size())
|
||||
: IAggregateFunctionHelper<AggregateFunctionArray>(arguments, {})
|
||||
, nested_func(nested_), num_arguments(arguments.size())
|
||||
{
|
||||
for (const auto & type : arguments)
|
||||
if (!isArray(type))
|
||||
|
@ -27,9 +27,9 @@ AggregateFunctionPtr createAggregateFunctionAvg(const std::string & name, const
|
||||
AggregateFunctionPtr res;
|
||||
DataTypePtr data_type = argument_types[0];
|
||||
if (isDecimal(data_type))
|
||||
res.reset(createWithDecimalType<AggregateFuncAvg>(*data_type, *data_type));
|
||||
res.reset(createWithDecimalType<AggregateFuncAvg>(*data_type, *data_type, argument_types));
|
||||
else
|
||||
res.reset(createWithNumericType<AggregateFuncAvg>(*data_type));
|
||||
res.reset(createWithNumericType<AggregateFuncAvg>(*data_type, argument_types));
|
||||
|
||||
if (!res)
|
||||
throw Exception("Illegal type " + argument_types[0]->getName() + " of argument for aggregate function " + name,
|
||||
|
@ -49,13 +49,15 @@ public:
|
||||
using ColVecResult = std::conditional_t<IsDecimalNumber<T>, ColumnDecimal<Decimal128>, ColumnVector<Float64>>;
|
||||
|
||||
/// ctor for native types
|
||||
AggregateFunctionAvg()
|
||||
: scale(0)
|
||||
AggregateFunctionAvg(const DataTypes & argument_types_)
|
||||
: IAggregateFunctionDataHelper<Data, AggregateFunctionAvg<T, Data>>(argument_types_, {})
|
||||
, scale(0)
|
||||
{}
|
||||
|
||||
/// ctor for Decimals
|
||||
AggregateFunctionAvg(const IDataType & data_type)
|
||||
: scale(getDecimalScale(data_type))
|
||||
AggregateFunctionAvg(const IDataType & data_type, const DataTypes & argument_types_)
|
||||
: IAggregateFunctionDataHelper<Data, AggregateFunctionAvg<T, Data>>(argument_types_, {})
|
||||
, scale(getDecimalScale(data_type))
|
||||
{}
|
||||
|
||||
String getName() const override { return "avg"; }
|
||||
|
@ -21,7 +21,7 @@ AggregateFunctionPtr createAggregateFunctionBitwise(const std::string & name, co
|
||||
+ " is illegal, because it cannot be used in bitwise operations",
|
||||
ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
|
||||
|
||||
AggregateFunctionPtr res(createWithUnsignedIntegerType<AggregateFunctionBitwise, Data>(*argument_types[0]));
|
||||
AggregateFunctionPtr res(createWithUnsignedIntegerType<AggregateFunctionBitwise, Data>(*argument_types[0], argument_types[0]));
|
||||
|
||||
if (!res)
|
||||
throw Exception("Illegal type " + argument_types[0]->getName() + " of argument for aggregate function " + name, ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
|
||||
|
@ -43,6 +43,9 @@ template <typename T, typename Data>
|
||||
class AggregateFunctionBitwise final : public IAggregateFunctionDataHelper<Data, AggregateFunctionBitwise<T, Data>>
|
||||
{
|
||||
public:
|
||||
AggregateFunctionBitwise(const DataTypePtr & type)
|
||||
: IAggregateFunctionDataHelper<Data, AggregateFunctionBitwise<T, Data>>({type}, {}) {}
|
||||
|
||||
String getName() const override { return Data::name(); }
|
||||
|
||||
DataTypePtr getReturnType() const override
|
||||
|
@ -111,6 +111,7 @@ public:
|
||||
}
|
||||
|
||||
AggregateFunctionBoundingRatio(const DataTypes & arguments)
|
||||
: IAggregateFunctionDataHelper<AggregateFunctionBoundingRatioData, AggregateFunctionBoundingRatio>(arguments, {})
|
||||
{
|
||||
const auto x_arg = arguments.at(0).get();
|
||||
const auto y_arg = arguments.at(0).get();
|
||||
|
@ -9,12 +9,12 @@ namespace DB
|
||||
namespace
|
||||
{
|
||||
|
||||
AggregateFunctionPtr createAggregateFunctionCount(const std::string & name, const DataTypes & /*argument_types*/, const Array & parameters)
|
||||
AggregateFunctionPtr createAggregateFunctionCount(const std::string & name, const DataTypes & argument_types, const Array & parameters)
|
||||
{
|
||||
assertNoParameters(name, parameters);
|
||||
|
||||
/// 'count' accept any number of arguments and (in this case of non-Nullable types) simply ignore them.
|
||||
return std::make_shared<AggregateFunctionCount>();
|
||||
return std::make_shared<AggregateFunctionCount>(argument_types);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -28,6 +28,8 @@ namespace ErrorCodes
|
||||
class AggregateFunctionCount final : public IAggregateFunctionDataHelper<AggregateFunctionCountData, AggregateFunctionCount>
|
||||
{
|
||||
public:
|
||||
AggregateFunctionCount(const DataTypes & argument_types_) : IAggregateFunctionDataHelper(argument_types_, {}) {}
|
||||
|
||||
String getName() const override { return "count"; }
|
||||
|
||||
DataTypePtr getReturnType() const override
|
||||
@ -74,7 +76,8 @@ public:
|
||||
class AggregateFunctionCountNotNullUnary final : public IAggregateFunctionDataHelper<AggregateFunctionCountData, AggregateFunctionCountNotNullUnary>
|
||||
{
|
||||
public:
|
||||
AggregateFunctionCountNotNullUnary(const DataTypePtr & argument)
|
||||
AggregateFunctionCountNotNullUnary(const DataTypePtr & argument, const Array & params)
|
||||
: IAggregateFunctionDataHelper<AggregateFunctionCountData, AggregateFunctionCountNotNullUnary>({argument}, params)
|
||||
{
|
||||
if (!argument->isNullable())
|
||||
throw Exception("Logical error: not Nullable data type passed to AggregateFunctionCountNotNullUnary", ErrorCodes::LOGICAL_ERROR);
|
||||
@ -120,7 +123,8 @@ public:
|
||||
class AggregateFunctionCountNotNullVariadic final : public IAggregateFunctionDataHelper<AggregateFunctionCountData, AggregateFunctionCountNotNullVariadic>
|
||||
{
|
||||
public:
|
||||
AggregateFunctionCountNotNullVariadic(const DataTypes & arguments)
|
||||
AggregateFunctionCountNotNullVariadic(const DataTypes & arguments, const Array & params)
|
||||
: IAggregateFunctionDataHelper<AggregateFunctionCountData, AggregateFunctionCountNotNullVariadic>(arguments, params)
|
||||
{
|
||||
number_of_arguments = arguments.size();
|
||||
|
||||
|
@ -26,12 +26,12 @@ AggregateFunctionPtr createAggregateFunctionEntropy(const std::string & name, co
|
||||
if (num_args == 1)
|
||||
{
|
||||
/// Specialized implementation for single argument of numeric type.
|
||||
if (auto res = createWithNumericBasedType<AggregateFunctionEntropy>(*argument_types[0], num_args))
|
||||
if (auto res = createWithNumericBasedType<AggregateFunctionEntropy>(*argument_types[0], argument_types))
|
||||
return AggregateFunctionPtr(res);
|
||||
}
|
||||
|
||||
/// Generic implementation for other types or for multiple arguments.
|
||||
return std::make_shared<AggregateFunctionEntropy<UInt128>>(num_args);
|
||||
return std::make_shared<AggregateFunctionEntropy<UInt128>>(argument_types);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -54,7 +54,7 @@ struct EntropyData
|
||||
void merge(const EntropyData & rhs)
|
||||
{
|
||||
for (const auto & pair : rhs.map)
|
||||
map[pair.first] += pair.second;
|
||||
map[pair.getFirst()] += pair.getSecond();
|
||||
}
|
||||
|
||||
void serialize(WriteBuffer & buf) const
|
||||
@ -68,7 +68,7 @@ struct EntropyData
|
||||
while (reader.next())
|
||||
{
|
||||
const auto & pair = reader.get();
|
||||
map[pair.first] = pair.second;
|
||||
map[pair.getFirst()] = pair.getSecond();
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,12 +76,12 @@ struct EntropyData
|
||||
{
|
||||
UInt64 total_value = 0;
|
||||
for (const auto & pair : map)
|
||||
total_value += pair.second;
|
||||
total_value += pair.getSecond();
|
||||
|
||||
Float64 shannon_entropy = 0;
|
||||
for (const auto & pair : map)
|
||||
{
|
||||
Float64 frequency = Float64(pair.second) / total_value;
|
||||
Float64 frequency = Float64(pair.getSecond()) / total_value;
|
||||
shannon_entropy -= frequency * log2(frequency);
|
||||
}
|
||||
|
||||
@ -97,7 +97,9 @@ private:
|
||||
size_t num_args;
|
||||
|
||||
public:
|
||||
AggregateFunctionEntropy(size_t num_args) : num_args(num_args)
|
||||
AggregateFunctionEntropy(const DataTypes & argument_types_)
|
||||
: IAggregateFunctionDataHelper<EntropyData<Value>, AggregateFunctionEntropy<Value>>(argument_types_, {})
|
||||
, num_args(argument_types_.size())
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -97,7 +97,8 @@ private:
|
||||
|
||||
public:
|
||||
AggregateFunctionForEach(AggregateFunctionPtr nested_, const DataTypes & arguments)
|
||||
: nested_func(nested_), num_arguments(arguments.size())
|
||||
: IAggregateFunctionDataHelper<AggregateFunctionForEachData, AggregateFunctionForEach>(arguments, {})
|
||||
, nested_func(nested_), num_arguments(arguments.size())
|
||||
{
|
||||
nested_size_of_data = nested_func->sizeOfData();
|
||||
|
||||
|
@ -12,6 +12,7 @@ namespace DB
|
||||
namespace ErrorCodes
|
||||
{
|
||||
extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH;
|
||||
extern const int BAD_ARGUMENTS;
|
||||
}
|
||||
|
||||
namespace
|
||||
|
@ -48,12 +48,13 @@ class GroupArrayNumericImpl final
|
||||
: public IAggregateFunctionDataHelper<GroupArrayNumericData<T>, GroupArrayNumericImpl<T, Tlimit_num_elems>>
|
||||
{
|
||||
static constexpr bool limit_num_elems = Tlimit_num_elems::value;
|
||||
DataTypePtr data_type;
|
||||
DataTypePtr & data_type;
|
||||
UInt64 max_elems;
|
||||
|
||||
public:
|
||||
explicit GroupArrayNumericImpl(const DataTypePtr & data_type_, UInt64 max_elems_ = std::numeric_limits<UInt64>::max())
|
||||
: data_type(data_type_), max_elems(max_elems_) {}
|
||||
: IAggregateFunctionDataHelper<GroupArrayNumericData<T>, GroupArrayNumericImpl<T, Tlimit_num_elems>>({data_type_}, {})
|
||||
, data_type(this->argument_types[0]), max_elems(max_elems_) {}
|
||||
|
||||
String getName() const override { return "groupArray"; }
|
||||
|
||||
@ -248,12 +249,13 @@ class GroupArrayGeneralListImpl final
|
||||
static Data & data(AggregateDataPtr place) { return *reinterpret_cast<Data*>(place); }
|
||||
static const Data & data(ConstAggregateDataPtr place) { return *reinterpret_cast<const Data*>(place); }
|
||||
|
||||
DataTypePtr data_type;
|
||||
DataTypePtr & data_type;
|
||||
UInt64 max_elems;
|
||||
|
||||
public:
|
||||
GroupArrayGeneralListImpl(const DataTypePtr & data_type, UInt64 max_elems_ = std::numeric_limits<UInt64>::max())
|
||||
: data_type(data_type), max_elems(max_elems_) {}
|
||||
: IAggregateFunctionDataHelper<GroupArrayGeneralListData<Node>, GroupArrayGeneralListImpl<Node, limit_num_elems>>({data_type}, {})
|
||||
, data_type(this->argument_types[0]), max_elems(max_elems_) {}
|
||||
|
||||
String getName() const override { return "groupArray"; }
|
||||
|
||||
|
@ -13,6 +13,10 @@ namespace
|
||||
AggregateFunctionPtr createAggregateFunctionGroupArrayInsertAt(const std::string & name, const DataTypes & argument_types, const Array & parameters)
|
||||
{
|
||||
assertBinary(name, argument_types);
|
||||
|
||||
if (argument_types.size() != 2)
|
||||
throw Exception("Aggregate function groupArrayInsertAt requires two arguments.", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
|
||||
|
||||
return std::make_shared<AggregateFunctionGroupArrayInsertAtGeneric>(argument_types, parameters);
|
||||
}
Some files were not shown because too many files have changed in this diff.