Merge branch 'master' into fix_path_in_perftest
commit 660ed26389

contrib/hyperscan (vendored)
@@ -1 +1 @@
-Subproject commit 3058c9c20cba3accdf92544d8513a26240c4ff70
+Subproject commit 3907fd00ee8b2538739768fa9533f8635a276531
@@ -17,7 +17,7 @@ function find_reference_sha
# If not master, try to fetch pull/.../{head,merge}
if [ "$PR_TO_TEST" != "0" ]
then
-git -C ch fetch origin "refs/pull/$PR_TO_TEST/*:refs/heads/pr/*"
+git -C ch fetch origin "refs/pull/$PR_TO_TEST/*:refs/heads/pull/$PR_TO_TEST/*"
fi

# Go back from the revision to be tested, trying to find the closest published

@@ -28,9 +28,9 @@ function find_reference_sha
# and SHA_TO_TEST, but a revision that is merged with recent master, given
# by pull/.../merge ref.
# Master is the first parent of the pull/.../merge.
-if git -C ch rev-parse pr/merge
+if git -C ch rev-parse "pull/$PR_TO_TEST/merge"
then
-start_ref=pr/merge~
+start_ref="pull/$PR_TO_TEST/merge~"
fi

while :

@@ -73,11 +73,11 @@ if [ "$REF_PR" == "" ]; then echo Reference PR is not specified ; exit 1 ; fi

(
git -C ch log -1 --decorate "$SHA_TO_TEST" ||:
-if git -C ch rev-parse pr/merge &> /dev/null
+if git -C ch rev-parse "pull/$PR_TO_TEST/merge" &> /dev/null
then
echo
echo Real tested commit is:
-git -C ch log -1 --decorate pr/merge
+git -C ch log -1 --decorate "pull/$PR_TO_TEST/merge"
fi
) | tee right-commit.txt
@@ -20,9 +20,9 @@ RUN apt-get --allow-unauthenticated update -y \
# apt-get --allow-unauthenticated install --yes --no-install-recommends \
# pvs-studio

-ENV PKG_VERSION="pvs-studio-7.07.38234.46-amd64.deb"
+ENV PKG_VERSION="pvs-studio-7.07.38234.48-amd64.deb"

-RUN wget "http://files.viva64.com/$PKG_VERSION"
+RUN wget "https://files.viva64.com/$PKG_VERSION"
RUN sudo dpkg -i "$PKG_VERSION"

CMD cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
@@ -51,9 +51,6 @@ ORDER BY expr

For a description of parameters, see the [CREATE query description](../../../sql-reference/statements/create.md).

-!!! note "Note"
-`INDEX` is an experimental feature, see [Data Skipping Indexes](#table_engine-mergetree-data_skipping-indexes).
-
### Query Clauses {#mergetree-query-clauses}

- `ENGINE` — Name and parameters of the engine. `ENGINE = MergeTree()`. The `MergeTree` engine does not have parameters.

@@ -257,7 +254,7 @@ ClickHouse cannot use an index if the values of the primary key in the query par

ClickHouse uses this logic not only for days of the month sequences, but for any primary key that represents a partially-monotonic sequence.

-### Data Skipping Indexes (experimental) {#table_engine-mergetree-data_skipping-indexes}
+### Data Skipping Indexes {#table_engine-mergetree-data_skipping-indexes}

The index declaration is in the columns section of the `CREATE` query.
@@ -35,7 +35,7 @@ toc_title: Adopters
| [Exness](https://www.exness.com){.favicon} | Trading | Metrics, Logging | — | — | [Talk in Russian, May 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| [Geniee](https://geniee.co.jp){.favicon} | Ad network | Main product | — | — | [Blog post in Japanese, July 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| [HUYA](https://www.huya.com/){.favicon} | Video Streaming | Analytics | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
-| [Idealista](https://www.idealista.com){.favicon} | Real Estate | Analytics | — | — | [Blog Post in English, April 2019](https://clickhouse.yandex/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
+| [Idealista](https://www.idealista.com){.favicon} | Real Estate | Analytics | — | — | [Blog Post in English, April 2019](https://clickhouse.tech/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| [Infovista](https://www.infovista.com/){.favicon} | Networks | Analytics | — | — | [Slides in English, October 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup30/infovista.pdf) |
| [InnoGames](https://www.innogames.com){.favicon} | Games | Metrics, Logging | — | — | [Slides in Russian, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/graphite_and_clickHouse.pdf) |
| [Integros](https://integros.com){.favicon} | Platform for video services | Analytics | — | — | [Slides in Russian, May 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |
@@ -822,6 +822,7 @@ ClickHouse supports the following algorithms of choosing replicas:
- [Nearest hostname](#load_balancing-nearest_hostname)
- [In order](#load_balancing-in_order)
- [First or random](#load_balancing-first_or_random)
+- [Round robin](#load_balancing-round_robin)

### Random (by Default) {#load_balancing-random}

@@ -865,6 +866,14 @@ This algorithm chooses the first replica in the set or a random replica if the f

The `first_or_random` algorithm solves the problem of the `in_order` algorithm. With `in_order`, if one replica goes down, the next one gets a double load while the remaining replicas handle the usual amount of traffic. When using the `first_or_random` algorithm, the load is evenly distributed among replicas that are still available.

+### Round robin {#load_balancing-round_robin}
+
+``` sql
+load_balancing = round_robin
+```
+
+This algorithm uses a round-robin policy across replicas with the same number of errors (only queries with the `round_robin` policy are counted).
+
## prefer\_localhost\_replica {#settings-prefer-localhost-replica}

Enables/disables preferential use of the localhost replica when processing distributed queries.
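The `round_robin` value documented in the added lines above is applied like any other ClickHouse setting; a minimal sketch (not part of the diff), assuming a standard `clickhouse-client` session:

``` sql
SET load_balancing = 'round_robin';
```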
@@ -5,7 +5,7 @@ toc_title: ClickHouse Update

# ClickHouse Update {#clickhouse-update}

-If ClickHouse was installed from deb packages, execute the following commands on the server:
+If ClickHouse was installed from `deb` packages, execute the following commands on the server:

``` bash
$ sudo apt-get update

@@ -13,6 +13,6 @@ $ sudo apt-get install clickhouse-client clickhouse-server
$ sudo service clickhouse-server restart
```

-If you installed ClickHouse using something other than the recommended deb packages, use the appropriate update method.
+If you installed ClickHouse using something other than the recommended `deb` packages, use the appropriate update method.

ClickHouse does not support a distributed update. The operation should be performed consecutively on each separate server. Do not update all the servers on a cluster simultaneously, or the cluster will be unavailable for some time.
@@ -370,6 +370,46 @@ GROUP BY timeslot
└─────────────────────┴──────────────────────────────────────────────┴────────────────────────────────┘
```

+## minMap(key, value), minMap(Tuple(key, value)) {#agg_functions-minmap}
+
+Calculates the minimum from the ‘value’ array according to the keys specified in the ‘key’ array.
+Passing a tuple of key and value arrays is synonymous with passing two arrays of keys and values.
+The number of elements in ‘key’ and ‘value’ must be the same for each row that is totaled.
+Returns a tuple of two arrays: keys in sorted order, and values calculated for the corresponding keys.
+
+Example:
+
+```sql
+SELECT minMap(a, b)
+FROM values('a Array(Int32), b Array(Int64)', ([1, 2], [2, 2]), ([2, 3], [1, 1]))
+```
+
+```text
+┌─minMap(a, b)──────┐
+│ ([1,2,3],[2,1,1]) │
+└───────────────────┘
+```
+
+## maxMap(key, value), maxMap(Tuple(key, value)) {#agg_functions-maxmap}
+
+Calculates the maximum from the ‘value’ array according to the keys specified in the ‘key’ array.
+Passing a tuple of key and value arrays is synonymous with passing two arrays of keys and values.
+The number of elements in ‘key’ and ‘value’ must be the same for each row that is totaled.
+Returns a tuple of two arrays: keys in sorted order, and values calculated for the corresponding keys.
+
+Example:
+
+```sql
+SELECT maxMap(a, b)
+FROM values('a Array(Int32), b Array(Int64)', ([1, 2], [2, 2]), ([2, 3], [1, 1]))
+```
+
+```text
+┌─maxMap(a, b)──────┐
+│ ([1,2,3],[2,2,1]) │
+└───────────────────┘
+```
+
## skewPop {#skewpop}

Computes the [skewness](https://en.wikipedia.org/wiki/Skewness) of a sequence.
@@ -31,7 +31,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
`IPv4` domain supports custom input format as IPv4-strings:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```

@@ -31,7 +31,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
`IPv6` domain supports custom input as IPv6-strings:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```
@@ -701,13 +701,13 @@ arrayDifference(array)

**Parameters**

-- `array` – [Array](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [Array](https://clickhouse.tech/docs/en/data_types/array/).

**Returned values**

Returns an array of differences between adjacent elements.

-Type: [UInt\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#int-ranges), [Float\*](https://clickhouse.yandex/docs/en/data_types/float/).
+Type: [UInt\*](https://clickhouse.tech/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.tech/docs/en/data_types/int_uint/#int-ranges), [Float\*](https://clickhouse.tech/docs/en/data_types/float/).

**Example**

@@ -753,7 +753,7 @@ arrayDistinct(array)

**Parameters**

-- `array` – [Array](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [Array](https://clickhouse.tech/docs/en/data_types/array/).

**Returned values**
@@ -79,6 +79,7 @@ Complied expression cache used when query/user/profile enable option [compile](.
## FLUSH LOGS {#query_language-system-flush_logs}

Flushes buffers of log messages to system tables (e.g. system.query\_log). Allows you to not wait 7.5 seconds when debugging.
+This will also create system tables even if the message queue is empty.

## RELOAD CONFIG {#query_language-system-reload-config}
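The statement covered by this hunk is issued as an ordinary query; a minimal sketch (not part of the diff), assuming a connected client with privileges to flush the log tables:

``` sql
SYSTEM FLUSH LOGS;
```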
@@ -24,7 +24,7 @@ This release contains bug fixes for the previous release 1.1.54310:
#### New Features: {#new-features}

- Custom partitioning key for the MergeTree family of table engines.
-- [Kafka](https://clickhouse.yandex/docs/en/operations/table_engines/kafka/) table engine.
+- [Kafka](https://clickhouse.tech/docs/en/operations/table_engines/kafka/) table engine.
- Added support for loading [CatBoost](https://catboost.yandex/) models and applying them to data stored in ClickHouse.
- Added support for time zones with non-integer offsets from UTC.
- Added support for arithmetic operations with time intervals.
@@ -37,7 +37,7 @@ toc_title: Adoptante
| <a href="https://www.exness.com" class="favicon">Exness</a> | Comercio | Métricas, Registro | — | — | [Charla en ruso, mayo 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| <a href="https://geniee.co.jp" class="favicon">Sistema abierto.</a> | Red Ad | Producto principal | — | — | [Publicación de blog en japonés, julio 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| <a href="https://www.huya.com/" class="favicon">HUYA</a> | Video Streaming | Analítica | — | — | [Diapositivas en chino, octubre 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
-| <a href="https://www.idealista.com" class="favicon">Idealista</a> | Inmobiliario | Analítica | — | — | [Blog Post en Inglés, Abril 2019](https://clickhouse.yandex/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
+| <a href="https://www.idealista.com" class="favicon">Idealista</a> | Inmobiliario | Analítica | — | — | [Blog Post en Inglés, Abril 2019](https://clickhouse.tech/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| <a href="https://www.infovista.com/" class="favicon">Infovista</a> | Red | Analítica | — | — | [Diapositivas en español, octubre 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup30/infovista.pdf) |
| <a href="https://www.innogames.com" class="favicon">InnoGames</a> | Juego | Métricas, Registro | — | — | [Diapositivas en ruso, septiembre 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/graphite_and_clickHouse.pdf) |
| <a href="https://integros.com" class="favicon">Integros</a> | Plataforma para servicios de video | Analítica | — | — | [Diapositivas en ruso, mayo 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
`IPv4` domain admite formato de entrada personalizado como cadenas IPv4:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
`IPv6` domain admite entradas personalizadas como cadenas IPv6:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```

@@ -702,13 +702,13 @@ arrayDifference(array)

**Parámetros**

-- `array` – [Matriz](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [Matriz](https://clickhouse.tech/docs/en/data_types/array/).

**Valores devueltos**

Devuelve una matriz de diferencias entre los elementos adyacentes.

-Tipo: [UInt\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#uint-ranges), [En\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#int-ranges), [Flotante\*](https://clickhouse.yandex/docs/en/data_types/float/).
+Tipo: [UInt\*](https://clickhouse.tech/docs/en/data_types/int_uint/#uint-ranges), [En\*](https://clickhouse.tech/docs/en/data_types/int_uint/#int-ranges), [Flotante\*](https://clickhouse.tech/docs/en/data_types/float/).

**Ejemplo**

@@ -754,7 +754,7 @@ arrayDistinct(array)

**Parámetros**

-- `array` – [Matriz](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [Matriz](https://clickhouse.tech/docs/en/data_types/array/).

**Valores devueltos**

@@ -26,7 +26,7 @@ Esta versión contiene correcciones de errores para la versión anterior 1.1.543
#### Novedad: {#new-features}

- Clave de partición personalizada para la familia MergeTree de motores de tabla.
-- [Kafka](https://clickhouse.yandex/docs/en/operations/table_engines/kafka/) motor de mesa.
+- [Kafka](https://clickhouse.tech/docs/en/operations/table_engines/kafka/) motor de mesa.
- Se agregó soporte para cargar [CatBoost](https://catboost.yandex/) modelos y aplicarlos a los datos almacenados en ClickHouse.
- Se agregó soporte para zonas horarias con desplazamientos no enteros de UTC.
- Se agregó soporte para operaciones aritméticas con intervalos de tiempo.
@@ -37,7 +37,7 @@ toc_title: "\u067E\u0630\u06CC\u0631\u0627"
| <a href="https://www.exness.com" class="favicon">اعمال</a> | بازرگانی | معیارهای ورود به سیستم | — | — | [بحث در روسیه, بیشتر 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| <a href="https://geniee.co.jp" class="favicon">ژنی</a> | شبکه تبلیغاتی | محصول اصلی | — | — | [پست وبلاگ در ژاپن, جولای 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| <a href="https://www.huya.com/" class="favicon">HUYA</a> | جریان ویدیو | تجزیه و تحلیل | — | — | [اسلاید در چین, اکتبر 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
-| <a href="https://www.idealista.com" class="favicon">Idealista</a> | املاک و مستغلات | تجزیه و تحلیل | — | — | [پست وبلاگ به زبان انگلیسی, مارس 2019](https://clickhouse.yandex/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
+| <a href="https://www.idealista.com" class="favicon">Idealista</a> | املاک و مستغلات | تجزیه و تحلیل | — | — | [پست وبلاگ به زبان انگلیسی, مارس 2019](https://clickhouse.tech/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| <a href="https://www.infovista.com/" class="favicon">اینفویستا</a> | شبکه ها | تجزیه و تحلیل | — | — | [اسلاید به زبان انگلیسی, اکتبر 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup30/infovista.pdf) |
| <a href="https://www.innogames.com" class="favicon">نام</a> | بازی ها | معیارهای ورود به سیستم | — | — | [اسلاید در روسیه, سپتامبر 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/graphite_and_clickHouse.pdf) |
| <a href="https://integros.com" class="favicon">پوششی</a> | بستر های نرم افزاری برای خدمات تصویری | تجزیه و تحلیل | — | — | [اسلاید در روسیه, بیشتر 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
`IPv4` دامنه پشتیبانی از فرمت ورودی سفارشی به عنوان ایپو4 رشته:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
`IPv6` دامنه پشتیبانی از ورودی های سفارشی به عنوان ایپو6 رشته:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```

@@ -702,13 +702,13 @@ arrayDifference(array)

**پارامترها**

-- `array` – [& حذف](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [& حذف](https://clickhouse.tech/docs/en/data_types/array/).

**مقادیر بازگشتی**

بازگرداندن مجموعه ای از تفاوت بین عناصر مجاور.

-نوع: [اینترنت\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#int-ranges), [شناور\*](https://clickhouse.yandex/docs/en/data_types/float/).
+نوع: [اینترنت\*](https://clickhouse.tech/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.tech/docs/en/data_types/int_uint/#int-ranges), [شناور\*](https://clickhouse.tech/docs/en/data_types/float/).

**مثال**

@@ -754,7 +754,7 @@ arrayDistinct(array)

**پارامترها**

-- `array` – [& حذف](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [& حذف](https://clickhouse.tech/docs/en/data_types/array/).

**مقادیر بازگشتی**

@@ -26,7 +26,7 @@ toc_title: '2017'
#### ویژگی های جدید: {#new-features}

- کلید پارتیشن بندی سفارشی برای خانواده ادغام موتورهای جدول.
-- [کافکا](https://clickhouse.yandex/docs/en/operations/table_engines/kafka/) موتور جدول.
+- [کافکا](https://clickhouse.tech/docs/en/operations/table_engines/kafka/) موتور جدول.
- اضافه شدن پشتیبانی برای بارگذاری [مانتو](https://catboost.yandex/) مدل ها و استفاده از داده های ذخیره شده در کلیک.
- اضافه شدن پشتیبانی برای مناطق زمانی با شیپور خاموشی غیر عدد صحیح از مجموعه مقالات.
- اضافه شدن پشتیبانی برای عملیات ریاضی با فواصل زمانی.
@@ -37,7 +37,7 @@ toc_title: Adoptant
| <a href="https://www.exness.com" class="favicon">Exness</a> | Trading | Métriques, Journalisation | — | — | [Parler en russe, mai 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| <a href="https://geniee.co.jp" class="favicon">Geniee</a> | Réseau publicitaire | Produit principal | — | — | [Billet de Blog en japonais, juillet 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| <a href="https://www.huya.com/" class="favicon">HUYA</a> | Le Streaming Vidéo | Analytics | — | — | [Diapositives en chinois, octobre 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
-| <a href="https://www.idealista.com" class="favicon">Idealista</a> | Immobilier | Analytics | — | — | [Billet de Blog en anglais, avril 2019](https://clickhouse.yandex/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
+| <a href="https://www.idealista.com" class="favicon">Idealista</a> | Immobilier | Analytics | — | — | [Billet de Blog en anglais, avril 2019](https://clickhouse.tech/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| <a href="https://www.infovista.com/" class="favicon">Infovista</a> | Réseau | Analytics | — | — | [Diapositives en anglais, octobre 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup30/infovista.pdf) |
| <a href="https://www.innogames.com" class="favicon">InnoGames</a> | Jeu | Métriques, Journalisation | — | — | [Diapositives en russe, septembre 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/graphite_and_clickHouse.pdf) |
| <a href="https://integros.com" class="favicon">Integros</a> | Plate-forme pour les services vidéo | Analytics | — | — | [Diapositives en russe, mai 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
`IPv4` le domaine prend en charge le format d'entrée personnalisé en tant que chaînes IPv4:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
`IPv6` le domaine prend en charge l'entrée personnalisée en tant que chaînes IPv6:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```

@@ -702,13 +702,13 @@ arrayDifference(array)

**Paramètre**

-- `array` – [Tableau](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [Tableau](https://clickhouse.tech/docs/en/data_types/array/).

**Valeurs renvoyées**

Renvoie un tableau de différences entre les éléments adjacents.

-Type: [UInt\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#int-ranges), [Flottant\*](https://clickhouse.yandex/docs/en/data_types/float/).
+Type: [UInt\*](https://clickhouse.tech/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.tech/docs/en/data_types/int_uint/#int-ranges), [Flottant\*](https://clickhouse.tech/docs/en/data_types/float/).

**Exemple**

@@ -754,7 +754,7 @@ arrayDistinct(array)

**Paramètre**

-- `array` – [Tableau](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [Tableau](https://clickhouse.tech/docs/en/data_types/array/).

**Valeurs renvoyées**

@@ -26,7 +26,7 @@ Cette version contient des corrections de bugs pour la version précédente 1.1.
#### Nouveauté: {#new-features}

- Clé de partitionnement personnalisée pour la famille MergeTree des moteurs de table.
-- [Kafka](https://clickhouse.yandex/docs/en/operations/table_engines/kafka/) tableau moteur.
+- [Kafka](https://clickhouse.tech/docs/en/operations/table_engines/kafka/) tableau moteur.
- Ajout du support pour le chargement [CatBoost](https://catboost.yandex/) modèles et les appliquer aux données stockées dans ClickHouse.
- Ajout du support pour les fuseaux horaires avec des décalages non entiers de UTC.
- Ajout du support pour les opérations arithmétiques avec des intervalles de temps.
@@ -37,7 +37,7 @@ toc_title: "\u30A2\u30C0\u30D7\u30BF\u30FC"
| <a href="https://www.exness.com" class="favicon">Exness</a> | 取引 | 指標、ロギング | — | — | [ロシア語で話す,May2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| <a href="https://geniee.co.jp" class="favicon">魔神</a> | 広告ネットワーク | 主な製品 | — | — | [ブログ投稿日本語,July2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| <a href="https://www.huya.com/" class="favicon">HUYA</a> | ビデオストリーミング | 分析 | — | — | [中国語でのスライド,October2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
-| <a href="https://www.idealista.com" class="favicon">イデアリスタ</a> | 不動産 | 分析 | — | — | [ブログ投稿英語,April2019](https://clickhouse.yandex/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
+| <a href="https://www.idealista.com" class="favicon">イデアリスタ</a> | 不動産 | 分析 | — | — | [ブログ投稿英語,April2019](https://clickhouse.tech/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| <a href="https://www.infovista.com/" class="favicon">インフォビスタ</a> | ネット | 分析 | — | — | [2019年のスライド](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup30/infovista.pdf) |
| <a href="https://www.innogames.com" class="favicon">InnoGames</a> | ゲーム | 指標、ロギング | — | — | [2019年ロシア](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/graphite_and_clickHouse.pdf) |
| <a href="https://integros.com" class="favicon">インテグロス</a> | Platformビデオサービス | 分析 | — | — | [ロシア語でのスライド,月2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |

@@ -69,4 +69,4 @@ ClickHouseには、精度を犠牲にしてパフォーマンスを得るため
2. 既に挿入されたデータの変更または削除を、高頻度かつ低遅延に行う機能はありません。 [GDPR](https://gdpr-info.eu)に準拠するなど、データをクリーンアップまたは変更するために、バッチ削除およびバッチ更新が利用可能です。
3. インデックスが疎であるため、ClickHouseは、キーで単一行を取得するようなクエリにはあまり適していません。

-[Original article](https://clickhouse.yandex/docs/en/introduction/distinctive_features/) <!--hide-->
+[Original article](https://clickhouse.tech/docs/en/introduction/distinctive_features/) <!--hide-->

@@ -48,4 +48,4 @@ Yandex.Metricaには、Metrageと呼ばれるデータを集計するための

OLAPServerの制限を取り除き、レポートのための非集計データを扱う問題を解決するために、私達は ClickHouse DBMSを開発しました。

-[Original article](https://clickhouse.yandex/docs/en/introduction/history/) <!--hide-->
+[Original article](https://clickhouse.tech/docs/en/introduction/history/) <!--hide-->

@@ -5,9 +5,9 @@ toc_title: "\u30D1\u30D5\u30A9\u30FC\u30DE\u30F3\u30B9"

# パフォーマンス {#pahuomansu}

-Yandexの内部テスト結果によると、ClickHouseは、テスト可能なクラスのシステム間で同等の動作シナリオで最高のパフォーマンス(長時間のクエリで最も高いスループットと、短時間のクエリで最小のレイテンシの両方)を示します。 [別のページで](https://clickhouse.yandex/benchmark/dbms/)テスト結果を表示できます 。
+Yandexの内部テスト結果によると、ClickHouseは、テスト可能なクラスのシステム間で同等の動作シナリオで最高のパフォーマンス(長時間のクエリで最も高いスループットと、短時間のクエリで最小のレイテンシの両方)を示します。 [別のページで](https://clickhouse.tech/benchmark/dbms/)テスト結果を表示できます 。

-これは、多数の独立したベンチマークでも確認されています。インターネット検索で見つけることは難しくありませんし、 [私達がまとめた関連リンク集](https://clickhouse.yandex/#independent-benchmarks) から見つけることもできます。
+これは、多数の独立したベンチマークでも確認されています。インターネット検索で見つけることは難しくありませんし、 [私達がまとめた関連リンク集](https://clickhouse.tech/#independent-benchmarks) から見つけることもできます。

## 単一の巨大なクエリのスループット {#dan-yi-noju-da-nakuerinosurupututo}

@@ -27,4 +27,4 @@ Yandexの内部テスト結果によると、ClickHouseは、テスト可能な

少なくとも1000行のパケットにデータを挿入することをお勧めします。または、1秒あたり1回のリクエストを超えないでください。タブ区切りのダンプデータをMergeTreeテーブルに挿入する場合、挿入速度は50〜200MB/sになります。挿入された行のサイズが約1Kbの場合、速度は毎秒50,000〜200,000行になります。行が小さい場合、パフォーマンスは1秒あたりの行数で高くなります(Banner System データ- `>` 500,000行/秒、Graphite データ- `>` 1,000,000行/秒)。パフォーマンスを向上させるために、複数のINSERTクエリを並行して作成することで、パフォーマンスを線形に向上できます。

-[Original article](https://clickhouse.yandex/docs/ja/introduction/performance/) <!--hide-->
+[Original article](https://clickhouse.tech/docs/ja/introduction/performance/) <!--hide-->

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
`IPv4` ドメインはIPv4文字列としてカスタム入力形式をサポート:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```

@@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
`IPv6` ドメイ:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```

@@ -702,13 +702,13 @@ arrayDifference(array)

**パラメータ**

-- `array` – [配列](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [配列](https://clickhouse.tech/docs/en/data_types/array/).

**戻り値**

隣接する要素間の差分の配列を返します。

-タイプ: [UInt\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#int-ranges), [フロート\*](https://clickhouse.yandex/docs/en/data_types/float/).
+タイプ: [UInt\*](https://clickhouse.tech/docs/en/data_types/int_uint/#uint-ranges), [Int\*](https://clickhouse.tech/docs/en/data_types/int_uint/#int-ranges), [フロート\*](https://clickhouse.tech/docs/en/data_types/float/).

**例**

@@ -754,7 +754,7 @@ arrayDistinct(array)

**パラメータ**

-- `array` – [配列](https://clickhouse.yandex/docs/en/data_types/array/).
+- `array` – [配列](https://clickhouse.tech/docs/en/data_types/array/).

**戻り値**

@@ -26,7 +26,7 @@ toc_title: '2017'
#### 新しい機能: {#new-features}

- カスタムパーティショニングキーのMergeTree家族のテーブルエンジンです。
-- [カフカ](https://clickhouse.yandex/docs/en/operations/table_engines/kafka/) テーブルエンジン。
+- [カフカ](https://clickhouse.tech/docs/en/operations/table_engines/kafka/) テーブルエンジン。
- ロードのサポートを追加 [CatBoost](https://catboost.yandex/) モデルとClickHouseに格納されたデータにそれらを適用します。
- サポートが追加された時間帯と非整数オフセットからのUTCです。
- 時間間隔での算術演算のサポートが追加されました。
@@ -26,7 +26,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
`IPv4` поддерживает вставку в виде строк с текстовым представлением IPv4 адреса:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```

@@ -26,7 +26,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
`IPv6` поддерживает вставку в виде строк с текстовым представлением IPv6 адреса:

``` sql
-INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
+INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```

@@ -692,7 +692,7 @@ arrayDifference(array)

**Параметры**

-- `array` – [Массив](https://clickhouse.yandex/docs/ru/data_types/array/).
+- `array` – [Массив](https://clickhouse.tech/docs/ru/data_types/array/).

**Возвращаемое значение**

@@ -742,7 +742,7 @@ arrayDistinct(array)

**Параметры**

-- `array` – [Массив](https://clickhouse.yandex/docs/ru/data_types/array/).
+- `array` – [Массив](https://clickhouse.tech/docs/ru/data_types/array/).

**Возвращаемое значение**

@@ -74,6 +74,7 @@ SELECT name, status FROM system.dictionaries;
## FLUSH LOGS {#query_language-system-flush_logs}

Записывает буферы логов в системные таблицы (например system.query\_log). Позволяет не ждать 7.5 секунд при отладке.
+Если буфер логов пустой, то этот запрос просто создаст системные таблицы.

## RELOAD CONFIG {#query_language-system-reload-config}
docs/tools/blog.py (new file, 107 lines)
@@ -0,0 +1,107 @@
#!/usr/bin/env python3

import datetime
import logging
import os
import time

import nav  # monkey patches mkdocs

import mkdocs.commands
from mkdocs import config
from mkdocs import exceptions

import mdx_clickhouse
import redirects

import util


def build_for_lang(lang, args):
    logging.info(f'Building {lang} blog')

    try:
        theme_cfg = {
            'name': None,
            'custom_dir': os.path.join(os.path.dirname(__file__), '..', args.theme_dir),
            'language': lang,
            'direction': 'ltr',
            'static_templates': ['404.html'],
            'extra': {
                'now': int(time.mktime(datetime.datetime.now().timetuple()))  # TODO better way to avoid caching
            }
        }

        # the following list of languages is sorted according to
        # https://en.wikipedia.org/wiki/List_of_languages_by_total_number_of_speakers
        languages = {
            'en': 'English',
            'ru': 'Русский'
        }

        site_names = {
            'en': 'ClickHouse Blog',
            'ru': 'Блог ClickHouse '
        }

        assert len(site_names) == len(languages)

        site_dir = os.path.join(args.blog_output_dir, lang)

        plugins = ['macros']
        if args.htmlproofer:
            plugins.append('htmlproofer')

        website_url = 'https://clickhouse.tech'
        site_name = site_names.get(lang, site_names['en'])
        blog_nav, post_meta = nav.build_blog_nav(lang, args)
        raw_config = dict(
            site_name=site_name,
            site_url=f'{website_url}/blog/{lang}/',
            docs_dir=os.path.join(args.blog_dir, lang),
            site_dir=site_dir,
            strict=True,
            theme=theme_cfg,
            nav=blog_nav,
            copyright='©2016–2020 Yandex LLC',
            use_directory_urls=True,
            repo_name='ClickHouse/ClickHouse',
            repo_url='https://github.com/ClickHouse/ClickHouse/',
            edit_uri=f'edit/master/website/blog/{lang}',
            markdown_extensions=mdx_clickhouse.MARKDOWN_EXTENSIONS,
            plugins=plugins,
            extra=dict(
                now=datetime.datetime.now().isoformat(),
                rev=args.rev,
                rev_short=args.rev_short,
                rev_url=args.rev_url,
                website_url=website_url,
                events=args.events,
                languages=languages,
                includes_dir=os.path.join(os.path.dirname(__file__), '..', '_includes'),
                is_amp=False,
                is_blog=True,
                post_meta=post_meta
            )
        )

        cfg = config.load_config(**raw_config)
        mkdocs.commands.build.build(cfg)

        redirects.build_blog_redirects(args)

        # TODO: AMP for blog
        # if not args.skip_amp:
        #     amp.build_amp(lang, args, cfg)

        logging.info(f'Finished building {lang} blog')

    except exceptions.ConfigurationError as e:
        raise SystemExit('\n' + str(e))


def build_blog(args):
    tasks = []
    for lang in args.blog_lang.split(','):
        if lang:
            tasks.append((lang, args,))
    util.run_function_in_parallel(build_for_lang, tasks, threads=False)
@@ -20,8 +20,8 @@ from mkdocs import exceptions
import mkdocs.commands.build

import amp
+import blog
import mdx_clickhouse

import redirects
import single_page
import test

@@ -95,25 +95,6 @@ def build_for_lang(lang, args):
else:
    site_dir = os.path.join(args.docs_output_dir, lang)

-markdown_extensions = [
-    'mdx_clickhouse',
-    'admonition',
-    'attr_list',
-    'codehilite',
-    'nl2br',
-    'sane_lists',
-    'pymdownx.details',
-    'pymdownx.magiclink',
-    'pymdownx.superfences',
-    'extra',
-    {
-        'toc': {
-            'permalink': True,
-            'slugify': mdx_clickhouse.slugify
-        }
-    }
-]

plugins = ['macros']
if args.htmlproofer:
    plugins.append('htmlproofer')

@@ -133,7 +114,7 @@ def build_for_lang(lang, args):
repo_name='ClickHouse/ClickHouse',
repo_url='https://github.com/ClickHouse/ClickHouse/',
edit_uri=f'edit/master/docs/{lang}',
-markdown_extensions=markdown_extensions,
+markdown_extensions=mdx_clickhouse.MARKDOWN_EXTENSIONS,
plugins=plugins,
extra=dict(
    now=datetime.datetime.now().isoformat(),

@@ -147,14 +128,15 @@ def build_for_lang(lang, args):
        events=args.events,
        languages=languages,
        includes_dir=os.path.join(os.path.dirname(__file__), '..', '_includes'),
-        is_amp=False
+        is_amp=False,
+        is_blog=False
    )
)

if os.path.exists(config_path):
    raw_config['config_file'] = config_path
else:
-    raw_config['nav'] = nav.build_nav(lang, args)
+    raw_config['nav'] = nav.build_docs_nav(lang, args)

cfg = config.load_config(**raw_config)

@@ -187,7 +169,7 @@ def build_docs(args):
        if lang:
            tasks.append((lang, args,))
    util.run_function_in_parallel(build_for_lang, tasks, threads=False)
-    redirects.build_redirects(args)
+    redirects.build_docs_redirects(args)


def build(args):

@@ -204,6 +186,9 @@ def build(args):
    from github import build_releases
    build_releases(args, build_docs)

+    if not args.skip_blog:
+        blog.build_blog(args)
+
    if not args.skip_website:
        website.process_benchmark_results(args)
        website.minify_website(args)

@@ -215,9 +200,11 @@ if __name__ == '__main__':
    website_dir = os.path.join('..', 'website')
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--lang', default='en,es,fr,ru,zh,ja,tr,fa')
+    arg_parser.add_argument('--blog-lang', default='en,ru')
    arg_parser.add_argument('--docs-dir', default='.')
    arg_parser.add_argument('--theme-dir', default=website_dir)
    arg_parser.add_argument('--website-dir', default=website_dir)
+    arg_parser.add_argument('--blog-dir', default=os.path.join(website_dir, 'blog'))
    arg_parser.add_argument('--output-dir', default='build')
    arg_parser.add_argument('--enable-stable-releases', action='store_true')
    arg_parser.add_argument('--stable-releases-limit', type=int, default='3')

@@ -230,6 +217,7 @@ if __name__ == '__main__':
    arg_parser.add_argument('--skip-amp', action='store_true')
    arg_parser.add_argument('--skip-pdf', action='store_true')
    arg_parser.add_argument('--skip-website', action='store_true')
+    arg_parser.add_argument('--skip-blog', action='store_true')
    arg_parser.add_argument('--skip-git-log', action='store_true')
    arg_parser.add_argument('--test-only', action='store_true')
    arg_parser.add_argument('--minify', action='store_true')

@@ -249,6 +237,7 @@ if __name__ == '__main__':
    logging.getLogger('MARKDOWN').setLevel(logging.INFO)

    args.docs_output_dir = os.path.join(os.path.abspath(args.output_dir), 'docs')
+    args.blog_output_dir = os.path.join(os.path.abspath(args.output_dir), 'blog')

    from github import choose_latest_releases, get_events
    args.stable_releases = choose_latest_releases(args) if args.enable_stable_releases else []

@@ -259,6 +248,7 @@ if __name__ == '__main__':

    if args.test_only:
        args.skip_multi_page = True
+        args.skip_blog = True
        args.skip_website = True
        args.skip_pdf = True
        args.skip_amp = True
@@ -18,6 +18,30 @@ import amp
import website


+def slugify(value, separator):
+    return slugify_impl.slugify(value, separator=separator, word_boundary=True, save_order=True)
+
+
+MARKDOWN_EXTENSIONS = [
+    'mdx_clickhouse',
+    'admonition',
+    'attr_list',
+    'codehilite',
+    'nl2br',
+    'sane_lists',
+    'pymdownx.details',
+    'pymdownx.magiclink',
+    'pymdownx.superfences',
+    'extra',
+    {
+        'toc': {
+            'permalink': True,
+            'slugify': slugify
+        }
+    }
+]
+
+
class ClickHouseLinkMixin(object):

    def handleMatch(self, m, data):

@@ -72,10 +96,6 @@ def makeExtension(**kwargs):
    return ClickHouseMarkdown(**kwargs)


-def slugify(value, separator):
-    return slugify_impl.slugify(value, separator=separator, word_boundary=True, save_order=True)
-
-
def get_translations(dirname, lang):
    import babel.support
    return babel.support.Translations.load(
@@ -1,4 +1,5 @@
+import collections
import datetime
import logging
import os

@@ -19,7 +20,8 @@ def build_nav_entry(root, args):
        return None, None, None
    result_items = []
    index_meta, index_content = util.read_md_file(os.path.join(root, 'index.md'))
-    current_title = index_meta.get('toc_folder_title', index_meta.get('toc_title', find_first_header(index_content)))
+    current_title = index_meta.get('toc_folder_title', index_meta.get('toc_title'))
+    current_title = current_title or index_meta.get('title', find_first_header(index_content))
    for filename in os.listdir(root):
        path = os.path.join(root, filename)
        if os.path.isdir(path):

@@ -47,7 +49,7 @@ def build_nav_entry(root, args):
    return index_meta.get('toc_priority', 10000), current_title, result


-def build_nav(lang, args):
+def build_docs_nav(lang, args):
    docs_dir = os.path.join(args.docs_dir, lang)
    _, _, nav = build_nav_entry(docs_dir, args)
    result = []

@@ -64,10 +66,50 @@ def build_nav(lang, args):
        key = list(result[0].keys())[0]
        result[0][key][index_key] = 'index.md'
        result[0][key].move_to_end(index_key, last=False)
    print('result', result)
    return result


+def build_blog_nav(lang, args):
+    blog_dir = os.path.join(args.blog_dir, lang)
+    years = sorted(os.listdir(blog_dir), reverse=True)
+    result_nav = [{'hidden': 'index.md'}]
+    post_meta = collections.OrderedDict()
+    for year in years:
+        year_dir = os.path.join(blog_dir, year)
+        if not os.path.isdir(year_dir):
+            continue
+        result_nav.append({year: collections.OrderedDict()})
+        posts = []
+        post_meta_items = []
+        for post in os.listdir(year_dir):
+            meta, _ = util.read_md_file(os.path.join(year_dir, post))
+            post_date = meta['date']
+            post_title = meta['title']
+            if datetime.date.fromisoformat(post_date) > datetime.date.today():
+                continue
+            posts.append(
+                (post_date, post_title, os.path.join(year, post),)
+            )
+            if post_title in post_meta:
+                raise RuntimeError(f'Duplicate post title: {post_title}')
+            if not post_date.startswith(f'{year}-'):
+                raise RuntimeError(f'Post date {post_date} doesn\'t match the folder year {year}: {post_title}')
+            post_url_part = post.replace('.md', '')
+            post_meta_items.append((post_date, {
+                'date': post_date,
+                'title': post_title,
+                'image': meta.get('image'),
+                'url': f'/blog/{lang}/{year}/{post_url_part}/'
+            },))
+        for _, title, path in sorted(posts, reverse=True):
+            result_nav[-1][year][title] = path
+        for _, post_meta_item in sorted(post_meta_items,
+                                        reverse=True,
+                                        key=lambda item: item[0]):
+            post_meta[post_meta_item['title']] = post_meta_item
+    return result_nav, post_meta


def _custom_get_navigation(files, config):
    nav_config = config['nav'] or mkdocs.structure.nav.nest_paths(f.src_path for f in files.documentation_pages())
    items = mkdocs.structure.nav._data_to_navigation(nav_config, files, config)
@@ -25,24 +25,34 @@ def write_redirect_html(out_path, to_url):
</html>''')


-def build_redirect_html(args, from_path, to_path):
-    for lang in args.lang.split(','):
-        out_path = os.path.join(
-            args.docs_output_dir, lang,
-            from_path.replace('/index.md', '/index.html').replace('.md', '/index.html')
-        )
-        version_prefix = f'/{args.version_prefix}/' if args.version_prefix else '/'
-        target_path = to_path.replace('/index.md', '/').replace('.md', '/')
-        to_url = f'/docs{version_prefix}{lang}/{target_path}'
-        to_url = to_url.strip()
-        write_redirect_html(out_path, to_url)
+def build_redirect_html(args, base_prefix, lang, output_dir, from_path, to_path):
+    out_path = os.path.join(
+        output_dir, lang,
+        from_path.replace('/index.md', '/index.html').replace('.md', '/index.html')
+    )
+    version_prefix = f'/{args.version_prefix}/' if args.version_prefix else '/'
+    target_path = to_path.replace('/index.md', '/').replace('.md', '/')
+    to_url = f'/{base_prefix}{version_prefix}{lang}/{target_path}'
+    to_url = to_url.strip()
+    write_redirect_html(out_path, to_url)


-def build_redirects(args):
+def build_docs_redirects(args):
    with open(os.path.join(args.docs_dir, 'redirects.txt'), 'r') as f:
        for line in f:
-            from_path, to_path = line.split(' ', 1)
-            build_redirect_html(args, from_path, to_path)
+            for lang in args.lang.split(','):
+                from_path, to_path = line.split(' ', 1)
+                build_redirect_html(args, 'docs', lang, args.docs_output_dir, from_path, to_path)


+def build_blog_redirects(args):
+    for lang in args.blog_lang.split(','):
+        redirects_path = os.path.join(args.blog_dir, lang, 'redirects.txt')
+        if os.path.exists(redirects_path):
+            with open(redirects_path, 'r') as f:
+                for line in f:
+                    from_path, to_path = line.split(' ', 1)
+                    build_redirect_html(args, 'blog', lang, args.blog_output_dir, from_path, to_path)
+
+
def build_static_redirects(args):
@@ -17,20 +17,56 @@ import jsmin
import mdx_clickhouse


+def handle_iframe(iframe, soup):
+    if not iframe.attrs['src'].startswith('https://www.youtube.com/'):
+        raise RuntimeError('iframes are allowed only for YouTube')
+    wrapper = soup.new_tag('div')
+    wrapper.attrs['class'] = ['embed-responsive', 'embed-responsive-16by9']
+    iframe.insert_before(wrapper)
+    iframe.extract()
+    wrapper.insert(0, iframe)
+    if 'width' in iframe.attrs:
+        del iframe.attrs['width']
+    if 'height' in iframe.attrs:
+        del iframe.attrs['height']
+    iframe.attrs['allow'] = 'accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture'
+    iframe.attrs['class'] = 'embed-responsive-item'
+    iframe.attrs['frameborder'] = '0'
+    iframe.attrs['allowfullscreen'] = '1'
+
+
def adjust_markdown_html(content):
    soup = bs4.BeautifulSoup(
        content,
        features='html.parser'
    )

    for a in soup.find_all('a'):
        a_class = a.attrs.get('class')
        if a_class and 'headerlink' in a_class:
            a.string = '\xa0'

+    for iframe in soup.find_all('iframe'):
+        handle_iframe(iframe, soup)
+
    for img in soup.find_all('img'):
+        if img.attrs.get('alt') == 'iframe':
+            img.name = 'iframe'
+            img.string = ''
+            handle_iframe(img, soup)
+            continue
        img_class = img.attrs.get('class')
        if img_class:
            img.attrs['class'] = img_class + ['img-fluid']
        else:
            img.attrs['class'] = 'img-fluid'

+    for details in soup.find_all('details'):
+        for summary in details.find_all('summary'):
+            if summary.parent != details:
+                summary.extract()
+                details.insert(0, summary)
+
    for div in soup.find_all('div'):
        div_class = div.attrs.get('class')
        is_admonition = div_class and 'admonition' in div.attrs.get('class')

@@ -41,10 +77,12 @@ def adjust_markdown_html(content):
                a.attrs['class'] = a_class + ['alert-link']
            else:
                a.attrs['class'] = 'alert-link'

+        for p in div.find_all('p'):
+            p_class = p.attrs.get('class')
+            if is_admonition and p_class and ('admonition-title' in p_class):
+                p.attrs['class'] = p_class + ['alert-heading', 'display-6', 'mb-2']
+
        if is_admonition:
            div.attrs['role'] = 'alert'
            if ('info' in div_class) or ('note' in div_class):

@@ -107,10 +145,13 @@ def build_website(args):
            'public',
            'node_modules',
            'templates',
            'feathericons',
            'locale'
        )
    )
    shutil.copy2(
        os.path.join(args.website_dir, 'js', 'embedd.min.js'),
        os.path.join(args.output_dir, 'js', 'embedd.min.js')
    )

    for root, _, filenames in os.walk(args.output_dir):
        for filename in filenames:

@@ -136,6 +177,7 @@ def get_css_in(args):
        f"'{args.website_dir}/css/bootstrap.css'",
        f"'{args.website_dir}/css/docsearch.css'",
        f"'{args.website_dir}/css/base.css'",
+        f"'{args.website_dir}/css/blog.css'",
        f"'{args.website_dir}/css/docs.css'",
        f"'{args.website_dir}/css/highlight.css'"
    ]
@ -37,7 +37,7 @@ toc_title: Benimseyenler
|
||||
| <a href="https://www.exness.com" class="favicon">Exness</a> | Ticaret | Metrikler, Günlük Kaydı | — | — | [Rusça konuşun, Mayıs 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
|
||||
| <a href="https://geniee.co.jp" class="favicon">Geniee</a> | Reklam Ağı | Ana ürün | — | — | [Japonca Blog yazısı, Temmuz 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
|
||||
| <a href="https://www.huya.com/" class="favicon">HUYA</a> | Video Akışı | Analiz | — | — | [Çince slaytlar, Ekim 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
|
||||
| <a href="https://www.idealista.com" class="favicon">Idealista</a> | Emlak | Analiz | — | — | [İngilizce Blog yazısı, Nisan 2019](https://clickhouse.yandex/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
|
||||
| <a href="https://www.idealista.com" class="favicon">Idealista</a> | Emlak | Analiz | — | — | [İngilizce Blog yazısı, Nisan 2019](https://clickhouse.tech/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
|
||||
| <a href="https://www.infovista.com/" class="favicon">Infovista</a> | Ağlar | Analiz | — | — | [İngilizce slaytlar, Ekim 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup30/infovista.pdf) |
|
||||
| <a href="https://www.innogames.com" class="favicon">Innogames</a> | Oyun | Metrikler, Günlük Kaydı | — | — | [Rusça slaytlar, Eylül 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/graphite_and_clickHouse.pdf) |
|
||||
| <a href="https://integros.com" class="favicon">Integros</a> | Video hizmetleri platformu | Analiz | — | — | [Rusça slaytlar, Mayıs 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |
|
||||
|
@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
`IPv4` etki alanı IPv4 dizeleri olarak özel giriş biçimini destekler:

``` sql
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```
@ -33,7 +33,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
`IPv6` etki alanı IPv6 dizeleri olarak özel girişi destekler:

``` sql
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```
@ -702,13 +702,13 @@ arrayDifference(array)

**Parametre**

- `array` – [Dizi](https://clickhouse.yandex/docs/en/data_types/array/).
- `array` – [Dizi](https://clickhouse.tech/docs/en/data_types/array/).

**Döndürülen değerler**

Bitişik öğeler arasındaki farklar dizisini döndürür.

Tür: [Uİnt\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#uint-ranges), [Tamsayı\*](https://clickhouse.yandex/docs/en/data_types/int_uint/#int-ranges), [Yüzdürmek\*](https://clickhouse.yandex/docs/en/data_types/float/).
Tür: [Uİnt\*](https://clickhouse.tech/docs/en/data_types/int_uint/#uint-ranges), [Tamsayı\*](https://clickhouse.tech/docs/en/data_types/int_uint/#int-ranges), [Yüzdürmek\*](https://clickhouse.tech/docs/en/data_types/float/).

**Örnek**

@ -754,7 +754,7 @@ arrayDistinct(array)

**Parametre**

- `array` – [Dizi](https://clickhouse.yandex/docs/en/data_types/array/).
- `array` – [Dizi](https://clickhouse.tech/docs/en/data_types/array/).

**Döndürülen değerler**

@ -26,7 +26,7 @@ Bu sürüm önceki sürüm 1.1.54310 için hata düzeltmeleri içerir:
#### Yenilik: {#new-features}

- Tablo motorları MergeTree ailesi için özel bölümleme anahtarı.
- [Kafka](https://clickhouse.yandex/docs/en/operations/table_engines/kafka/) masa motoru.
- [Kafka](https://clickhouse.tech/docs/en/operations/table_engines/kafka/) masa motoru.
- Yükleme için destek eklendi [CatBoost](https://catboost.yandex/) modelleri ve ClickHouse saklanan verilere uygulayarak.
- UTC olmayan tamsayı uzaklıklar ile saat dilimleri için destek eklendi.
- Zaman aralıklarıyla aritmetik işlemler için destek eklendi.
@ -35,7 +35,7 @@ toc_title: "\u91C7\u7528\u8005"
| [Exness](https://www.exness.com) | 交易 | 指标,日志记录 | — | — | [俄语交谈,2019年5月](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| [精灵](https://geniee.co.jp) | 广告网络 | 主要产品 | — | — | [日文博客,2017年7月](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| [虎牙](https://www.huya.com/) | 视频流 | 分析 | — | — | [中文幻灯片,2018年10月](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
| [Idealista](https://www.idealista.com) | 房地产 | 分析 | — | — | [英文博客文章,四月2019](https://clickhouse.yandex/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| [Idealista](https://www.idealista.com) | 房地产 | 分析 | — | — | [英文博客文章,四月2019](https://clickhouse.tech/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| [Infovista](https://www.infovista.com/) | 网络 | 分析 | — | — | [英文幻灯片,十月2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup30/infovista.pdf) |
| [InnoGames](https://www.innogames.com) | 游戏 | 指标,日志记录 | — | — | [俄文幻灯片,2019年9月](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/graphite_and_clickHouse.pdf) |
| [Integros](https://integros.com) | 视频服务平台 | 分析 | — | — | [俄文幻灯片,2019年5月](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |
@ -24,7 +24,7 @@ CREATE TABLE hits (url String, from IPv4) ENGINE = MergeTree() ORDER BY from;
在写入与查询时,`IPv4`类型能够识别可读性更加友好的输入输出格式:

``` sql
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.yandex/docs/en/', '116.106.34.242');
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '116.253.40.133')('https://clickhouse.tech', '183.247.232.58')('https://clickhouse.tech/docs/en/', '116.106.34.242');

SELECT * FROM hits;
```
@ -24,7 +24,7 @@ CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from;
在写入与查询时,`IPv6`类型能够识别可读性更加友好的输入输出格式:

``` sql
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.yandex/docs/en/', '2a02:e980:1e::1');
INSERT INTO hits (url, from) VALUES ('https://wikipedia.org', '2a02:aa08:e000:3100::2')('https://clickhouse.tech', '2001:44c8:129:2632:33:0:252:2')('https://clickhouse.tech/docs/en/', '2a02:e980:1e::1');

SELECT * FROM hits;
```
@ -26,7 +26,7 @@ toc_title: '2017'
#### 新功能: {#new-features}

- MergeTree表引擎系列的自定义分区键。
- [卡夫卡](https://clickhouse.yandex/docs/en/operations/table_engines/kafka/) 表引擎。
- [卡夫卡](https://clickhouse.tech/docs/en/operations/table_engines/kafka/) 表引擎。
- 增加了对加载的支持 [CatBoost](https://catboost.yandex/) 模型并将其应用到ClickHouse中存储的数据。
- 增加了对UTC非整数偏移的时区的支持。
- 增加了对具有时间间隔的算术运算的支持。
@ -986,7 +986,10 @@ private:
/// Process the query that doesn't require transferring data blocks to the server.
void processOrdinaryQuery()
{
/// We will always rewrite query (even if there are no query_parameters) because it will help to find errors in query formatter.
/// Rewrite query only when we have query parameters.
/// Note that if query is rewritten, comments in query are lost.
/// But the user often wants to see comments in server logs, query log, processlist, etc.
if (!query_parameters.empty())
{
/// Replace ASTQueryParameter with ASTLiteral for prepared statements.
ReplaceQueryParameterVisitor visitor(query_parameters);
@ -1,5 +1,6 @@
#include "Internals.h"
#include <Storages/MergeTree/MergeTreeData.h>
#include <Storages/extractKeyExpressionList.h>

namespace DB
{
@ -184,9 +185,9 @@ Names extractPrimaryKeyColumnNames(const ASTPtr & storage_ast)
const auto sorting_key_ast = extractOrderBy(storage_ast);
const auto primary_key_ast = extractPrimaryKey(storage_ast);

const auto sorting_key_expr_list = MergeTreeData::extractKeyExpressionList(sorting_key_ast);
const auto sorting_key_expr_list = extractKeyExpressionList(sorting_key_ast);
const auto primary_key_expr_list = primary_key_ast
? MergeTreeData::extractKeyExpressionList(primary_key_ast) : sorting_key_expr_list->clone();
? extractKeyExpressionList(primary_key_ast) : sorting_key_expr_list->clone();

/// Maybe we have to handle VersionedCollapsing engine separately. But in our case it looks pointless.
@ -14,6 +14,7 @@ set (CLICKHOUSE_ODBC_BRIDGE_SOURCES
set (CLICKHOUSE_ODBC_BRIDGE_LINK
PRIVATE
clickhouse_parsers
clickhouse_aggregate_functions
daemon
dbms
Poco::Data
@ -869,7 +869,7 @@ int Server::main(const std::vector<std::string> & /*args*/)

if (listen_try)
{
LOG_ERROR(log, "{}. If it is an IPv6 or IPv4 address and your host has disabled IPv6 or IPv4, then consider to "
LOG_WARNING(log, "{}. If it is an IPv6 or IPv4 address and your host has disabled IPv6 or IPv4, then consider to "
"specify not disabled IPv4 or IPv6 address to listen in <listen_host> element of configuration "
"file. Example for disabled IPv6: <listen_host>0.0.0.0</listen_host> ."
" Example for disabled IPv4: <listen_host>::</listen_host>",
@ -1013,7 +1013,8 @@ int Server::main(const std::vector<std::string> & /*args*/)
}

if (servers.empty())
throw Exception("No servers started (add valid listen_host and 'tcp_port' or 'http_port' to configuration file.)", ErrorCodes::NO_ELEMENTS_IN_CONFIG);
throw Exception("No servers started (add valid listen_host and 'tcp_port' or 'http_port' to configuration file.)",
ErrorCodes::NO_ELEMENTS_IN_CONFIG);

global_context->enableNamedSessions();
@ -64,19 +64,23 @@ namespace

bool parseImpl(Pos & pos, ASTPtr & node, Expected & expected) override
{
if (ParserCreateUserQuery{}.enableAttachMode(true).parse(pos, node, expected))
return true;
if (ParserCreateRoleQuery{}.enableAttachMode(true).parse(pos, node, expected))
return true;
if (ParserCreateRowPolicyQuery{}.enableAttachMode(true).parse(pos, node, expected))
return true;
if (ParserCreateQuotaQuery{}.enableAttachMode(true).parse(pos, node, expected))
return true;
if (ParserCreateSettingsProfileQuery{}.enableAttachMode(true).parse(pos, node, expected))
return true;
if (ParserGrantQuery{}.enableAttachMode(true).parse(pos, node, expected))
return true;
return false;
ParserCreateUserQuery create_user_p;
ParserCreateRoleQuery create_role_p;
ParserCreateRowPolicyQuery create_policy_p;
ParserCreateQuotaQuery create_quota_p;
ParserCreateSettingsProfileQuery create_profile_p;
ParserGrantQuery grant_p;

create_user_p.useAttachMode();
create_role_p.useAttachMode();
create_policy_p.useAttachMode();
create_quota_p.useAttachMode();
create_profile_p.useAttachMode();
grant_p.useAttachMode();

return create_user_p.parse(pos, node, expected) || create_role_p.parse(pos, node, expected)
|| create_policy_p.parse(pos, node, expected) || create_quota_p.parse(pos, node, expected)
|| create_profile_p.parse(pos, node, expected) || grant_p.parse(pos, node, expected);
}
};
@ -261,7 +265,9 @@ namespace
/// Calculates the path for storing a map of name of access entity to UUID for access entities of some type.
std::filesystem::path getListFilePath(const String & directory_path, EntityType type)
{
std::string_view file_name = EntityTypeInfo::get(type).list_filename;
String file_name = EntityTypeInfo::get(type).plural_raw_name;
boost::to_lower(file_name);
file_name += ".list";
return std::filesystem::path(directory_path).append(file_name);
}
@ -45,11 +45,13 @@ struct IAccessEntity
struct TypeInfo
{
const char * const raw_name;
const char * const plural_raw_name;
const String name; /// Uppercased with spaces instead of underscores, e.g. "SETTINGS PROFILE".
const String alias; /// Alias of the keyword or empty string, e.g. "PROFILE".
const String plural_name; /// Uppercased with spaces plural name, e.g. "SETTINGS PROFILES".
const String plural_alias; /// Uppercased with spaces plural name alias, e.g. "PROFILES".
const String name_for_output_with_entity_name; /// Lowercased with spaces instead of underscores, e.g. "settings profile".
const char unique_char; /// Unique character for this type. E.g. 'P' for SETTINGS_PROFILE.
const String list_filename; /// Name of the file containing list of objects of this type, including the file extension ".list".
const int not_found_error_code;

static const TypeInfo & get(Type type_);
@ -69,6 +71,18 @@ struct IAccessEntity
friend bool operator ==(const IAccessEntity & lhs, const IAccessEntity & rhs) { return lhs.equal(rhs); }
friend bool operator !=(const IAccessEntity & lhs, const IAccessEntity & rhs) { return !(lhs == rhs); }

struct LessByName
{
bool operator()(const IAccessEntity & lhs, const IAccessEntity & rhs) const { return (lhs.getName() < rhs.getName()); }
bool operator()(const std::shared_ptr<const IAccessEntity> & lhs, const std::shared_ptr<const IAccessEntity> & rhs) const { return operator()(*lhs, *rhs); }
};

struct LessByTypeAndName
{
bool operator()(const IAccessEntity & lhs, const IAccessEntity & rhs) const { return (lhs.getType() < rhs.getType()) || ((lhs.getType() == rhs.getType()) && (lhs.getName() < rhs.getName())); }
bool operator()(const std::shared_ptr<const IAccessEntity> & lhs, const std::shared_ptr<const IAccessEntity> & rhs) const { return operator()(*lhs, *rhs); }
};

protected:
String name;
@ -87,44 +101,49 @@ using AccessEntityPtr = std::shared_ptr<const IAccessEntity>;

inline const IAccessEntity::TypeInfo & IAccessEntity::TypeInfo::get(Type type_)
{
static constexpr auto make_info = [](const char * raw_name_, char unique_char_, const char * list_filename_, int not_found_error_code_)
static constexpr auto make_info = [](const char * raw_name_, const char * plural_raw_name_, char unique_char_, int not_found_error_code_)
{
String init_name = raw_name_;
boost::to_upper(init_name);
boost::replace_all(init_name, "_", " ");
String init_alias;
if (auto underscore_pos = init_name.find_first_of(" "); underscore_pos != String::npos)
init_alias = init_name.substr(underscore_pos + 1);
String init_name_for_output_with_entity_name = init_name;
String init_names[2] = {raw_name_, plural_raw_name_};
String init_aliases[2];
for (size_t i = 0; i != std::size(init_names); ++i)
{
String & init_name = init_names[i];
String & init_alias = init_aliases[i];
boost::to_upper(init_name);
boost::replace_all(init_name, "_", " ");
if (auto underscore_pos = init_name.find_first_of(" "); underscore_pos != String::npos)
init_alias = init_name.substr(underscore_pos + 1);
}
String init_name_for_output_with_entity_name = init_names[0];
boost::to_lower(init_name_for_output_with_entity_name);
return TypeInfo{raw_name_, std::move(init_name), std::move(init_alias), std::move(init_name_for_output_with_entity_name), unique_char_, list_filename_, not_found_error_code_};
return TypeInfo{raw_name_, plural_raw_name_, std::move(init_names[0]), std::move(init_aliases[0]), std::move(init_names[1]), std::move(init_aliases[1]), std::move(init_name_for_output_with_entity_name), unique_char_, not_found_error_code_};
};

switch (type_)
{
case Type::USER:
{
static const auto info = make_info("USER", 'U', "users.list", ErrorCodes::UNKNOWN_USER);
static const auto info = make_info("USER", "USERS", 'U', ErrorCodes::UNKNOWN_USER);
return info;
}
case Type::ROLE:
{
static const auto info = make_info("ROLE", 'R', "roles.list", ErrorCodes::UNKNOWN_ROLE);
static const auto info = make_info("ROLE", "ROLES", 'R', ErrorCodes::UNKNOWN_ROLE);
return info;
}
case Type::SETTINGS_PROFILE:
{
static const auto info = make_info("SETTINGS_PROFILE", 'S', "settings_profiles.list", ErrorCodes::THERE_IS_NO_PROFILE);
static const auto info = make_info("SETTINGS_PROFILE", "SETTINGS_PROFILES", 'S', ErrorCodes::THERE_IS_NO_PROFILE);
return info;
}
case Type::ROW_POLICY:
{
static const auto info = make_info("ROW_POLICY", 'P', "row_policies.list", ErrorCodes::UNKNOWN_ROW_POLICY);
static const auto info = make_info("ROW_POLICY", "ROW_POLICIES", 'P', ErrorCodes::UNKNOWN_ROW_POLICY);
return info;
}
case Type::QUOTA:
{
static const auto info = make_info("QUOTA", 'Q', "quotas.list", ErrorCodes::UNKNOWN_QUOTA);
static const auto info = make_info("QUOTA", "QUOTAS", 'Q', ErrorCodes::UNKNOWN_QUOTA);
return info;
}
case Type::MAX: break;
@ -24,16 +24,141 @@ namespace
|
||||
using EntityType = IAccessStorage::EntityType;
|
||||
using EntityTypeInfo = IAccessStorage::EntityTypeInfo;
|
||||
|
||||
bool isNotFoundErrorCode(int error_code)
|
||||
|
||||
String outputID(const UUID & id)
|
||||
{
|
||||
if (error_code == ErrorCodes::ACCESS_ENTITY_NOT_FOUND)
|
||||
return true;
|
||||
return "ID(" + toString(id) + ")";
|
||||
}
|
||||
|
||||
for (auto type : ext::range(EntityType::MAX))
|
||||
if (error_code == EntityTypeInfo::get(type).not_found_error_code)
|
||||
return true;
|
||||
String outputTypeAndNameOrID(const IAccessStorage & storage, const UUID & id)
|
||||
{
|
||||
auto entity = storage.tryRead(id);
|
||||
if (entity)
|
||||
return entity->outputTypeAndName();
|
||||
return outputID(id);
|
||||
}
|
||||
|
||||
return false;
|
||||
|
||||
template <typename Func, typename ResultType = std::result_of_t<Func()>>
|
||||
ResultType doTry(const Func & func)
|
||||
{
|
||||
try
|
||||
{
|
||||
return func();
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
template <bool ignore_errors, typename T, typename ApplyFunc, typename GetNameFunc = std::nullptr_t,
|
||||
typename ResultTypeOfApplyFunc = std::result_of_t<ApplyFunc(T)>,
|
||||
typename ResultType = std::conditional_t<std::is_same_v<ResultTypeOfApplyFunc, void>, void, std::vector<ResultTypeOfApplyFunc>>>
|
||||
ResultType applyToMultipleEntities(
|
||||
const std::vector<T> & multiple_entities,
|
||||
const ApplyFunc & apply_function,
|
||||
const char * error_message_format [[maybe_unused]] = nullptr,
|
||||
const GetNameFunc & get_name_function [[maybe_unused]] = nullptr)
|
||||
{
|
||||
std::optional<Exception> exception;
|
||||
std::vector<bool> success;
|
||||
|
||||
auto helper = [&](const auto & apply_and_store_result_function)
|
||||
{
|
||||
for (size_t i = 0; i != multiple_entities.size(); ++i)
|
||||
{
|
||||
try
|
||||
{
|
||||
apply_and_store_result_function(multiple_entities[i]);
|
||||
if constexpr (!ignore_errors)
|
||||
success[i] = true;
|
||||
}
|
||||
catch (Exception & e)
|
||||
{
|
||||
if (!ignore_errors && !exception)
|
||||
exception.emplace(e);
|
||||
}
|
||||
catch (Poco::Exception & e)
|
||||
{
|
||||
if (!ignore_errors && !exception)
|
||||
exception.emplace(Exception::CreateFromPocoTag{}, e);
|
||||
}
|
||||
catch (std::exception & e)
|
||||
{
|
||||
if (!ignore_errors && !exception)
|
||||
exception.emplace(Exception::CreateFromSTDTag{}, e);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if constexpr (std::is_same_v<ResultType, void>)
|
||||
{
|
||||
if (multiple_entities.empty())
|
||||
return;
|
||||
|
||||
if (multiple_entities.size() == 1)
|
||||
{
|
||||
apply_function(multiple_entities.front());
|
||||
return;
|
||||
}
|
||||
|
||||
if constexpr (!ignore_errors)
|
||||
success.resize(multiple_entities.size(), false);
|
||||
|
||||
helper(apply_function);
|
||||
|
||||
if (ignore_errors || !exception)
|
||||
return;
|
||||
}
|
||||
else
|
||||
{
|
||||
ResultType result;
|
||||
if (multiple_entities.empty())
|
||||
return result;
|
||||
|
||||
if (multiple_entities.size() == 1)
|
||||
{
|
||||
result.emplace_back(apply_function(multiple_entities.front()));
|
||||
return result;
|
||||
}
|
||||
|
||||
result.reserve(multiple_entities.size());
|
||||
if constexpr (!ignore_errors)
|
||||
success.resize(multiple_entities.size(), false);
|
||||
|
||||
helper([&](const T & entity) { result.emplace_back(apply_function(entity)); });
|
||||
|
||||
if (ignore_errors || !exception)
|
||||
return result;
|
||||
}
|
||||
|
||||
if constexpr (!ignore_errors)
|
||||
{
|
||||
Strings succeeded_names_list;
|
||||
Strings failed_names_list;
|
||||
for (size_t i = 0; i != multiple_entities.size(); ++i)
|
||||
{
|
||||
const auto & entity = multiple_entities[i];
|
||||
String name = get_name_function(entity);
|
||||
if (success[i])
|
||||
succeeded_names_list.emplace_back(name);
|
||||
else
|
||||
failed_names_list.emplace_back(name);
|
||||
}
|
||||
String succeeded_names = boost::algorithm::join(succeeded_names_list, ", ");
|
||||
String failed_names = boost::algorithm::join(failed_names_list, ", ");
|
||||
if (succeeded_names.empty())
|
||||
succeeded_names = "none";
|
||||
|
||||
String error_message = error_message_format;
|
||||
boost::replace_all(error_message, "{succeeded_names}", succeeded_names);
|
||||
boost::replace_all(error_message, "{failed_names}", failed_names);
|
||||
exception->addMessage(error_message);
|
||||
exception->rethrow();
|
||||
}
|
||||
__builtin_unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
@ -91,14 +216,7 @@ bool IAccessStorage::exists(const UUID & id) const
|
||||
|
||||
AccessEntityPtr IAccessStorage::tryReadBase(const UUID & id) const
|
||||
{
|
||||
try
|
||||
{
|
||||
return readImpl(id);
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
return nullptr;
|
||||
}
|
||||
return doTry([&] { return readImpl(id); });
|
||||
}
|
||||
|
||||
|
||||
@ -110,14 +228,7 @@ String IAccessStorage::readName(const UUID & id) const
|
||||
|
||||
std::optional<String> IAccessStorage::tryReadName(const UUID & id) const
|
||||
{
|
||||
try
|
||||
{
|
||||
return readNameImpl(id);
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
return doTry([&] { return std::optional<String>{readNameImpl(id)}; });
|
||||
}
|
||||
|
||||
|
||||
@ -129,56 +240,25 @@ UUID IAccessStorage::insert(const AccessEntityPtr & entity)
|
||||
|
||||
std::vector<UUID> IAccessStorage::insert(const std::vector<AccessEntityPtr> & multiple_entities)
|
||||
{
|
||||
std::vector<UUID> ids;
|
||||
ids.reserve(multiple_entities.size());
|
||||
String error_message;
|
||||
for (const auto & entity : multiple_entities)
|
||||
{
|
||||
try
|
||||
{
|
||||
ids.push_back(insertImpl(entity, false));
|
||||
}
|
||||
catch (Exception & e)
|
||||
{
|
||||
if (e.code() != ErrorCodes::ACCESS_ENTITY_ALREADY_EXISTS)
|
||||
throw;
|
||||
error_message += (error_message.empty() ? "" : ". ") + e.message();
|
||||
}
|
||||
}
|
||||
if (!error_message.empty())
|
||||
throw Exception(error_message, ErrorCodes::ACCESS_ENTITY_ALREADY_EXISTS);
|
||||
return ids;
|
||||
return applyToMultipleEntities</* ignore_errors = */ false>(
|
||||
multiple_entities,
|
||||
[this](const AccessEntityPtr & entity) { return insertImpl(entity, /* replace_if_exists = */ false); },
|
||||
"Couldn't insert {failed_names}. Successfully inserted: {succeeded_names}",
|
||||
[](const AccessEntityPtr & entity) { return entity->outputTypeAndName(); });
|
||||
}
|
||||
|
||||
|
||||
std::optional<UUID> IAccessStorage::tryInsert(const AccessEntityPtr & entity)
|
||||
{
|
||||
try
|
||||
{
|
||||
return insertImpl(entity, false);
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
return doTry([&] { return std::optional<UUID>{insertImpl(entity, false)}; });
|
||||
}
|
||||
|
||||
|
||||
std::vector<UUID> IAccessStorage::tryInsert(const std::vector<AccessEntityPtr> & multiple_entities)
|
||||
{
|
||||
std::vector<UUID> ids;
|
||||
ids.reserve(multiple_entities.size());
|
||||
for (const auto & entity : multiple_entities)
|
||||
{
|
||||
try
|
||||
{
|
||||
ids.push_back(insertImpl(entity, false));
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
}
|
||||
}
|
||||
return ids;
|
||||
return applyToMultipleEntities</* ignore_errors = */ true>(
|
||||
multiple_entities,
|
||||
[this](const AccessEntityPtr & entity) { return insertImpl(entity, /* replace_if_exists = */ false); });
|
||||
}
|
||||
|
||||
|
||||
@ -190,11 +270,11 @@ UUID IAccessStorage::insertOrReplace(const AccessEntityPtr & entity)
|
||||
|
||||
std::vector<UUID> IAccessStorage::insertOrReplace(const std::vector<AccessEntityPtr> & multiple_entities)
|
||||
{
|
||||
std::vector<UUID> ids;
|
||||
ids.reserve(multiple_entities.size());
|
||||
for (const auto & entity : multiple_entities)
|
||||
ids.push_back(insertImpl(entity, true));
|
||||
return ids;
|
||||
return applyToMultipleEntities</* ignore_errors = */ false>(
|
||||
multiple_entities,
|
||||
[this](const AccessEntityPtr & entity) { return insertImpl(entity, /* replace_if_exists = */ true); },
|
||||
"Couldn't insert {failed_names}. Successfully inserted: {succeeded_names}",
|
||||
[](const AccessEntityPtr & entity) -> String { return entity->outputTypeAndName(); });
|
||||
}
|
||||
|
||||
|
||||
@ -206,60 +286,25 @@ void IAccessStorage::remove(const UUID & id)
|
||||
|
||||
void IAccessStorage::remove(const std::vector<UUID> & ids)
|
||||
{
|
||||
String error_message;
|
||||
std::optional<int> error_code;
|
||||
for (const auto & id : ids)
|
||||
{
|
||||
try
|
||||
{
|
||||
removeImpl(id);
|
||||
}
|
||||
catch (Exception & e)
|
||||
{
|
||||
if (!isNotFoundErrorCode(e.code()))
|
||||
throw;
|
||||
error_message += (error_message.empty() ? "" : ". ") + e.message();
|
||||
if (error_code && (*error_code != e.code()))
|
||||
error_code = ErrorCodes::ACCESS_ENTITY_NOT_FOUND;
|
||||
else
|
||||
error_code = e.code();
|
||||
}
|
||||
}
|
||||
if (!error_message.empty())
|
||||
throw Exception(error_message, *error_code);
|
||||
applyToMultipleEntities</* ignore_errors = */ false>(
|
||||
ids,
|
||||
[this](const UUID & id) { removeImpl(id); },
|
||||
"Couldn't remove {failed_names}. Successfully removed: {succeeded_names}",
|
||||
[this](const UUID & id) { return outputTypeAndNameOrID(*this, id); });
|
||||
}
|
||||
|
||||
|
||||
bool IAccessStorage::tryRemove(const UUID & id)
|
||||
{
|
||||
try
|
||||
{
|
||||
removeImpl(id);
|
||||
return true;
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return doTry([&] { removeImpl(id); return true; });
|
||||
}
|
||||
|
||||
|
||||
std::vector<UUID> IAccessStorage::tryRemove(const std::vector<UUID> & ids)
|
||||
{
|
||||
std::vector<UUID> removed;
|
||||
removed.reserve(ids.size());
|
||||
for (const auto & id : ids)
|
||||
{
|
||||
try
|
||||
{
|
||||
removeImpl(id);
|
||||
removed.push_back(id);
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
}
|
||||
}
|
||||
return removed;
|
||||
return applyToMultipleEntities</* ignore_errors = */ true>(
|
||||
ids,
|
||||
[this](const UUID & id) { removeImpl(id); return id; });
|
||||
}
|
||||
|
||||
|
||||
@ -271,60 +316,25 @@ void IAccessStorage::update(const UUID & id, const UpdateFunc & update_func)
|
||||
|
||||
void IAccessStorage::update(const std::vector<UUID> & ids, const UpdateFunc & update_func)
|
||||
{
|
||||
String error_message;
|
||||
std::optional<int> error_code;
|
||||
for (const auto & id : ids)
|
||||
{
|
||||
try
|
||||
{
|
||||
updateImpl(id, update_func);
|
||||
}
|
||||
catch (Exception & e)
|
||||
{
|
||||
if (!isNotFoundErrorCode(e.code()))
|
||||
throw;
|
||||
error_message += (error_message.empty() ? "" : ". ") + e.message();
|
||||
if (error_code && (*error_code != e.code()))
|
||||
error_code = ErrorCodes::ACCESS_ENTITY_NOT_FOUND;
|
||||
else
|
||||
error_code = e.code();
|
||||
}
|
||||
}
|
||||
if (!error_message.empty())
|
||||
throw Exception(error_message, *error_code);
|
||||
applyToMultipleEntities</* ignore_errors = */ false>(
|
||||
ids,
|
||||
[this, &update_func](const UUID & id) { updateImpl(id, update_func); },
|
||||
"Couldn't update {failed_names}. Successfully updated: {succeeded_names}",
|
||||
[this](const UUID & id) { return outputTypeAndNameOrID(*this, id); });
|
||||
}
|
||||
|
||||
|
||||
bool IAccessStorage::tryUpdate(const UUID & id, const UpdateFunc & update_func)
|
||||
{
|
||||
try
|
||||
{
|
||||
updateImpl(id, update_func);
|
||||
return true;
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return doTry([&] { updateImpl(id, update_func); return true; });
|
||||
}
|
||||
|
||||
|
||||
std::vector<UUID> IAccessStorage::tryUpdate(const std::vector<UUID> & ids, const UpdateFunc & update_func)
|
||||
{
|
||||
std::vector<UUID> updated;
|
||||
updated.reserve(ids.size());
|
||||
for (const auto & id : ids)
|
||||
{
|
||||
try
|
||||
{
|
||||
updateImpl(id, update_func);
|
||||
updated.push_back(id);
|
||||
}
|
||||
catch (Exception &)
|
||||
{
|
||||
}
|
||||
}
|
||||
return updated;
|
||||
return applyToMultipleEntities</* ignore_errors = */ true>(
|
||||
ids,
|
||||
[this, &update_func](const UUID & id) { updateImpl(id, update_func); return id; });
|
||||
}
|
||||
|
||||
|
||||
@ -388,7 +398,7 @@ Poco::Logger * IAccessStorage::getLogger() const
|
||||
|
||||
void IAccessStorage::throwNotFound(const UUID & id) const
|
||||
{
|
||||
throw Exception("ID {" + toString(id) + "} not found in [" + getStorageName() + "]", ErrorCodes::ACCESS_ENTITY_NOT_FOUND);
|
||||
throw Exception(outputID(id) + " not found in [" + getStorageName() + "]", ErrorCodes::ACCESS_ENTITY_NOT_FOUND);
|
||||
}
|
||||
|
||||
|
||||
@ -402,7 +412,7 @@ void IAccessStorage::throwNotFound(EntityType type, const String & name) const
|
||||
void IAccessStorage::throwBadCast(const UUID & id, EntityType type, const String & name, EntityType required_type)
|
||||
{
|
||||
throw Exception(
|
||||
"ID {" + toString(id) + "}: " + outputEntityTypeAndName(type, name) + " expected to be of type " + toString(required_type),
|
||||
outputID(id) + ": " + outputEntityTypeAndName(type, name) + " expected to be of type " + toString(required_type),
|
||||
ErrorCodes::LOGICAL_ERROR);
|
||||
}
|
||||
|
||||
@ -410,7 +420,7 @@ void IAccessStorage::throwBadCast(const UUID & id, EntityType type, const String
|
||||
void IAccessStorage::throwIDCollisionCannotInsert(const UUID & id, EntityType type, const String & name, EntityType existing_type, const String & existing_name) const
|
||||
{
|
||||
throw Exception(
|
||||
outputEntityTypeAndName(type, name) + ": cannot insert because the ID {" + toString(id) + "} is already used by "
|
||||
outputEntityTypeAndName(type, name) + ": cannot insert because the " + outputID(id) + " is already used by "
|
||||
+ outputEntityTypeAndName(existing_type, existing_name) + " in [" + getStorageName() + "]",
|
||||
ErrorCodes::ACCESS_ENTITY_ALREADY_EXISTS);
|
||||
}
|
||||
|
@ -1,7 +1,9 @@
|
||||
#pragma once
|
||||
|
||||
#include <Access/IAccessEntity.h>
|
||||
#include <Access/ExtendedRoleSet.h>
|
||||
#include <Access/RolesOrUsersSet.h>
|
||||
#include <ext/range.h>
|
||||
#include <boost/algorithm/string/split.hpp>
|
||||
#include <boost/lexical_cast.hpp>
|
||||
#include <chrono>
|
||||
|
||||
@ -84,14 +86,15 @@ struct Quota : public IAccessEntity
|
||||
struct KeyTypeInfo
|
||||
{
|
||||
const char * const raw_name;
|
||||
const String name; /// Lowercased with spaces, e.g. "client key".
|
||||
const String name; /// Lowercased with underscores, e.g. "client_key".
|
||||
const std::vector<KeyType> base_types; /// For combined types keeps base types, e.g. for CLIENT_KEY_OR_USER_NAME it keeps [KeyType::CLIENT_KEY, KeyType::USER_NAME].
|
||||
static const KeyTypeInfo & get(KeyType type);
|
||||
};
|
||||
|
||||
KeyType key_type = KeyType::NONE;
|
||||
|
||||
/// Which roles or users should use this quota.
|
||||
ExtendedRoleSet to_roles;
|
||||
RolesOrUsersSet to_roles;
|
||||
|
||||
bool equal(const IAccessEntity & other) const override;
|
||||
std::shared_ptr<IAccessEntity> clone() const override { return cloneImpl<Quota>(); }
|
||||
@ -195,8 +198,21 @@ inline const Quota::KeyTypeInfo & Quota::KeyTypeInfo::get(KeyType type)
|
||||
{
|
||||
String init_name = raw_name_;
|
||||
boost::to_lower(init_name);
|
||||
boost::replace_all(init_name, "_", " ");
|
||||
return KeyTypeInfo{raw_name_, std::move(init_name)};
|
||||
std::vector<KeyType> init_base_types;
|
||||
String replaced = boost::algorithm::replace_all_copy(init_name, "_or_", "|");
|
||||
Strings tokens;
|
||||
boost::algorithm::split(tokens, replaced, boost::is_any_of("|"));
|
||||
if (tokens.size() > 1)
|
||||
{
|
||||
for (const auto & token : tokens)
|
||||
for (auto kt : ext::range(KeyType::MAX))
|
||||
if (KeyTypeInfo::get(kt).name == token)
|
||||
{
|
||||
init_base_types.push_back(kt);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return KeyTypeInfo{raw_name_, std::move(init_name), std::move(init_base_types)};
|
||||
};
|
||||
|
||||
switch (type)
|
||||
|
@ -39,7 +39,7 @@ private:
|
||||
|
||||
QuotaPtr quota;
|
||||
UUID quota_id;
|
||||
const ExtendedRoleSet * roles = nullptr;
|
||||
const RolesOrUsersSet * roles = nullptr;
|
||||
std::unordered_map<String /* quota key */, boost::shared_ptr<const Intervals>> key_to_intervals;
|
||||
};
|
||||
|
||||
|
@ -1,9 +1,8 @@
|
||||
|
||||
#include <Access/ExtendedRoleSet.h>
|
||||
#include <Access/RolesOrUsersSet.h>
|
||||
#include <Access/AccessControlManager.h>
|
||||
#include <Access/User.h>
|
||||
#include <Access/Role.h>
|
||||
#include <Parsers/ASTExtendedRoleSet.h>
|
||||
#include <Parsers/ASTRolesOrUsersSet.h>
|
||||
#include <Parsers/formatAST.h>
|
||||
#include <IO/ReadHelpers.h>
|
||||
#include <IO/WriteHelpers.h>
|
||||
@ -20,51 +19,51 @@ namespace ErrorCodes
|
||||
}
|
||||
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet() = default;
|
||||
ExtendedRoleSet::ExtendedRoleSet(const ExtendedRoleSet & src) = default;
|
||||
ExtendedRoleSet & ExtendedRoleSet::operator =(const ExtendedRoleSet & src) = default;
|
||||
ExtendedRoleSet::ExtendedRoleSet(ExtendedRoleSet && src) = default;
|
||||
ExtendedRoleSet & ExtendedRoleSet::operator =(ExtendedRoleSet && src) = default;
|
||||
RolesOrUsersSet::RolesOrUsersSet() = default;
|
||||
RolesOrUsersSet::RolesOrUsersSet(const RolesOrUsersSet & src) = default;
|
||||
RolesOrUsersSet & RolesOrUsersSet::operator =(const RolesOrUsersSet & src) = default;
|
||||
RolesOrUsersSet::RolesOrUsersSet(RolesOrUsersSet && src) = default;
|
||||
RolesOrUsersSet & RolesOrUsersSet::operator =(RolesOrUsersSet && src) = default;
|
||||
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet(AllTag)
|
||||
RolesOrUsersSet::RolesOrUsersSet(AllTag)
|
||||
{
|
||||
all = true;
|
||||
}
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet(const UUID & id)
|
||||
RolesOrUsersSet::RolesOrUsersSet(const UUID & id)
|
||||
{
|
||||
add(id);
|
||||
}
|
||||
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet(const std::vector<UUID> & ids_)
|
||||
RolesOrUsersSet::RolesOrUsersSet(const std::vector<UUID> & ids_)
|
||||
{
|
||||
add(ids_);
|
||||
}
|
||||
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet(const ASTExtendedRoleSet & ast)
|
||||
RolesOrUsersSet::RolesOrUsersSet(const ASTRolesOrUsersSet & ast)
|
||||
{
|
||||
init(ast, nullptr);
|
||||
}
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet(const ASTExtendedRoleSet & ast, const std::optional<UUID> & current_user_id)
|
||||
RolesOrUsersSet::RolesOrUsersSet(const ASTRolesOrUsersSet & ast, const std::optional<UUID> & current_user_id)
|
||||
{
|
||||
init(ast, nullptr, current_user_id);
|
||||
}
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet(const ASTExtendedRoleSet & ast, const AccessControlManager & manager)
|
||||
RolesOrUsersSet::RolesOrUsersSet(const ASTRolesOrUsersSet & ast, const AccessControlManager & manager)
|
||||
{
|
||||
init(ast, &manager);
|
||||
}
|
||||
|
||||
ExtendedRoleSet::ExtendedRoleSet(const ASTExtendedRoleSet & ast, const AccessControlManager & manager, const std::optional<UUID> & current_user_id)
|
||||
RolesOrUsersSet::RolesOrUsersSet(const ASTRolesOrUsersSet & ast, const AccessControlManager & manager, const std::optional<UUID> & current_user_id)
|
||||
{
|
||||
init(ast, &manager, current_user_id);
|
||||
}
|
||||
|
||||
void ExtendedRoleSet::init(const ASTExtendedRoleSet & ast, const AccessControlManager * manager, const std::optional<UUID> & current_user_id)
|
||||
void RolesOrUsersSet::init(const ASTRolesOrUsersSet & ast, const AccessControlManager * manager, const std::optional<UUID> & current_user_id)
|
||||
{
|
||||
all = ast.all;
|
||||
|
||||
@ -73,20 +72,20 @@ void ExtendedRoleSet::init(const ASTExtendedRoleSet & ast, const AccessControlMa
|
||||
if (ast.id_mode)
|
||||
return parse<UUID>(name);
|
||||
assert(manager);
|
||||
if (ast.can_contain_users && ast.can_contain_roles)
|
||||
if (ast.allow_user_names && ast.allow_role_names)
|
||||
{
|
||||
auto id = manager->find<User>(name);
|
||||
if (id)
|
||||
return *id;
|
||||
return manager->getID<Role>(name);
|
||||
}
|
||||
else if (ast.can_contain_users)
|
||||
else if (ast.allow_user_names)
|
||||
{
|
||||
return manager->getID<User>(name);
|
||||
}
|
||||
else
|
||||
{
|
||||
assert(ast.can_contain_roles);
|
||||
assert(ast.allow_role_names);
|
||||
return manager->getID<Role>(name);
|
||||
}
|
||||
};
|
||||
@ -122,9 +121,9 @@ void ExtendedRoleSet::init(const ASTExtendedRoleSet & ast, const AccessControlMa
|
||||
}
|
||||
|
||||
|
||||
std::shared_ptr<ASTExtendedRoleSet> ExtendedRoleSet::toAST() const
|
||||
std::shared_ptr<ASTRolesOrUsersSet> RolesOrUsersSet::toAST() const
|
||||
{
|
||||
auto ast = std::make_shared<ASTExtendedRoleSet>();
|
||||
auto ast = std::make_shared<ASTRolesOrUsersSet>();
|
||||
ast->id_mode = true;
|
||||
ast->all = all;
|
||||
|
||||
@ -148,9 +147,9 @@ std::shared_ptr<ASTExtendedRoleSet> ExtendedRoleSet::toAST() const
|
||||
}
|
||||
|
||||
|
||||
std::shared_ptr<ASTExtendedRoleSet> ExtendedRoleSet::toASTWithNames(const AccessControlManager & manager) const
|
||||
std::shared_ptr<ASTRolesOrUsersSet> RolesOrUsersSet::toASTWithNames(const AccessControlManager & manager) const
|
||||
{
|
||||
auto ast = std::make_shared<ASTExtendedRoleSet>();
|
||||
auto ast = std::make_shared<ASTRolesOrUsersSet>();
|
||||
ast->all = all;
|
||||
|
||||
if (!ids.empty())
|
||||
@ -181,21 +180,21 @@ std::shared_ptr<ASTExtendedRoleSet> ExtendedRoleSet::toASTWithNames(const Access
|
||||
}
|
||||
|
||||
|
||||
String ExtendedRoleSet::toString() const
|
||||
String RolesOrUsersSet::toString() const
|
||||
{
|
||||
auto ast = toAST();
|
||||
return serializeAST(*ast);
|
||||
}
|
||||
|
||||
|
||||
String ExtendedRoleSet::toStringWithNames(const AccessControlManager & manager) const
|
||||
String RolesOrUsersSet::toStringWithNames(const AccessControlManager & manager) const
|
||||
{
|
||||
auto ast = toASTWithNames(manager);
|
||||
return serializeAST(*ast);
|
||||
}
|
||||
|
||||
|
||||
Strings ExtendedRoleSet::toStringsWithNames(const AccessControlManager & manager) const
|
||||
Strings RolesOrUsersSet::toStringsWithNames(const AccessControlManager & manager) const
|
||||
{
|
||||
if (!all && ids.empty())
|
||||
return {};
|
||||
@ -233,13 +232,13 @@ Strings ExtendedRoleSet::toStringsWithNames(const AccessControlManager & manager
|
||||
}
|
||||
|
||||
|
||||
bool ExtendedRoleSet::empty() const
|
||||
bool RolesOrUsersSet::empty() const
|
||||
{
|
||||
return ids.empty() && !all;
|
||||
}
|
||||
|
||||
|
||||
void ExtendedRoleSet::clear()
|
||||
void RolesOrUsersSet::clear()
|
||||
{
|
||||
ids.clear();
|
||||
all = false;
|
||||
@ -247,26 +246,26 @@ void ExtendedRoleSet::clear()
|
||||
}
|
||||
|
||||
|
||||
void ExtendedRoleSet::add(const UUID & id)
|
||||
void RolesOrUsersSet::add(const UUID & id)
|
||||
{
|
||||
ids.insert(id);
|
||||
}
|
||||
|
||||
|
||||
void ExtendedRoleSet::add(const std::vector<UUID> & ids_)
|
||||
void RolesOrUsersSet::add(const std::vector<UUID> & ids_)
|
||||
{
|
||||
for (const auto & id : ids_)
|
||||
add(id);
|
||||
}
|
||||
|
||||
|
||||
bool ExtendedRoleSet::match(const UUID & id) const
|
||||
bool RolesOrUsersSet::match(const UUID & id) const
|
||||
{
|
||||
return (all || ids.count(id)) && !except_ids.count(id);
|
||||
}
|
||||
|
||||
|
||||
bool ExtendedRoleSet::match(const UUID & user_id, const boost::container::flat_set<UUID> & enabled_roles) const
|
||||
bool RolesOrUsersSet::match(const UUID & user_id, const boost::container::flat_set<UUID> & enabled_roles) const
|
||||
{
|
||||
if (!all && !ids.count(user_id))
|
||||
{
|
||||
@ -285,7 +284,7 @@ bool ExtendedRoleSet::match(const UUID & user_id, const boost::container::flat_s
|
||||
}
|
||||
|
||||
|
||||
std::vector<UUID> ExtendedRoleSet::getMatchingIDs() const
|
||||
std::vector<UUID> RolesOrUsersSet::getMatchingIDs() const
|
||||
{
|
||||
if (all)
|
||||
throw Exception("getAllMatchingIDs() can't get ALL ids without manager", ErrorCodes::LOGICAL_ERROR);
|
||||
@ -295,7 +294,7 @@ std::vector<UUID> ExtendedRoleSet::getMatchingIDs() const
|
||||
}
|
||||
|
||||
|
||||
std::vector<UUID> ExtendedRoleSet::getMatchingIDs(const AccessControlManager & manager) const
|
||||
std::vector<UUID> RolesOrUsersSet::getMatchingIDs(const AccessControlManager & manager) const
|
||||
{
|
||||
if (!all)
|
||||
return getMatchingIDs();
|
||||
@ -316,7 +315,7 @@ std::vector<UUID> ExtendedRoleSet::getMatchingIDs(const AccessControlManager & m
|
||||
}
|
||||
|
||||
|
||||
bool operator ==(const ExtendedRoleSet & lhs, const ExtendedRoleSet & rhs)
|
||||
bool operator ==(const RolesOrUsersSet & lhs, const RolesOrUsersSet & rhs)
|
||||
{
|
||||
return (lhs.all == rhs.all) && (lhs.ids == rhs.ids) && (lhs.except_ids == rhs.except_ids);
|
||||
}
|
@ -8,35 +8,35 @@
|
||||
|
||||
namespace DB
|
||||
{
|
||||
class ASTExtendedRoleSet;
|
||||
class ASTRolesOrUsersSet;
|
||||
class AccessControlManager;
|
||||
|
||||
|
||||
/// Represents a set of users/roles like
|
||||
/// {user_name | role_name | CURRENT_USER} [,...] | NONE | ALL | ALL EXCEPT {user_name | role_name | CURRENT_USER} [,...]
|
||||
/// Similar to ASTExtendedRoleSet, but with IDs instead of names.
|
||||
struct ExtendedRoleSet
|
||||
/// Similar to ASTRolesOrUsersSet, but with IDs instead of names.
|
||||
struct RolesOrUsersSet
|
||||
{
|
||||
ExtendedRoleSet();
|
||||
ExtendedRoleSet(const ExtendedRoleSet & src);
|
||||
ExtendedRoleSet & operator =(const ExtendedRoleSet & src);
|
||||
ExtendedRoleSet(ExtendedRoleSet && src);
|
||||
ExtendedRoleSet & operator =(ExtendedRoleSet && src);
|
||||
RolesOrUsersSet();
|
||||
RolesOrUsersSet(const RolesOrUsersSet & src);
|
||||
RolesOrUsersSet & operator =(const RolesOrUsersSet & src);
|
||||
RolesOrUsersSet(RolesOrUsersSet && src);
|
||||
RolesOrUsersSet & operator =(RolesOrUsersSet && src);
|
||||
|
||||
struct AllTag {};
|
||||
ExtendedRoleSet(AllTag);
|
||||
RolesOrUsersSet(AllTag);
|
||||
|
||||
ExtendedRoleSet(const UUID & id);
|
||||
ExtendedRoleSet(const std::vector<UUID> & ids_);
|
||||
RolesOrUsersSet(const UUID & id);
|
||||
RolesOrUsersSet(const std::vector<UUID> & ids_);
|
||||
|
||||
/// The constructor from AST requires the AccessControlManager if `ast.id_mode == false`.
|
||||
ExtendedRoleSet(const ASTExtendedRoleSet & ast);
|
||||
ExtendedRoleSet(const ASTExtendedRoleSet & ast, const std::optional<UUID> & current_user_id);
|
||||
ExtendedRoleSet(const ASTExtendedRoleSet & ast, const AccessControlManager & manager);
|
||||
ExtendedRoleSet(const ASTExtendedRoleSet & ast, const AccessControlManager & manager, const std::optional<UUID> & current_user_id);
|
||||
RolesOrUsersSet(const ASTRolesOrUsersSet & ast);
|
||||
RolesOrUsersSet(const ASTRolesOrUsersSet & ast, const std::optional<UUID> & current_user_id);
|
||||
RolesOrUsersSet(const ASTRolesOrUsersSet & ast, const AccessControlManager & manager);
|
||||
RolesOrUsersSet(const ASTRolesOrUsersSet & ast, const AccessControlManager & manager, const std::optional<UUID> & current_user_id);
|
||||
|
||||
std::shared_ptr<ASTExtendedRoleSet> toAST() const;
|
||||
std::shared_ptr<ASTExtendedRoleSet> toASTWithNames(const AccessControlManager & manager) const;
|
||||
std::shared_ptr<ASTRolesOrUsersSet> toAST() const;
|
||||
std::shared_ptr<ASTRolesOrUsersSet> toASTWithNames(const AccessControlManager & manager) const;
|
||||
|
||||
String toString() const;
|
||||
String toStringWithNames(const AccessControlManager & manager) const;
|
||||
@ -47,7 +47,7 @@ struct ExtendedRoleSet
|
||||
void add(const UUID & id);
|
||||
void add(const std::vector<UUID> & ids_);
|
||||
|
||||
/// Checks if a specified ID matches this ExtendedRoleSet.
|
||||
/// Checks if a specified ID matches this RolesOrUsersSet.
|
||||
bool match(const UUID & id) const;
|
||||
bool match(const UUID & user_id, const boost::container::flat_set<UUID> & enabled_roles) const;
|
||||
|
||||
@ -57,15 +57,15 @@ struct ExtendedRoleSet
|
||||
/// Returns a list of matching users and roles.
|
||||
std::vector<UUID> getMatchingIDs(const AccessControlManager & manager) const;
|
||||
|
||||
friend bool operator ==(const ExtendedRoleSet & lhs, const ExtendedRoleSet & rhs);
|
||||
friend bool operator !=(const ExtendedRoleSet & lhs, const ExtendedRoleSet & rhs) { return !(lhs == rhs); }
|
||||
friend bool operator ==(const RolesOrUsersSet & lhs, const RolesOrUsersSet & rhs);
|
||||
friend bool operator !=(const RolesOrUsersSet & lhs, const RolesOrUsersSet & rhs) { return !(lhs == rhs); }
|
||||
|
||||
boost::container::flat_set<UUID> ids;
|
||||
bool all = false;
|
||||
boost::container::flat_set<UUID> except_ids;
|
||||
|
||||
private:
|
||||
void init(const ASTExtendedRoleSet & ast, const AccessControlManager * manager = nullptr, const std::optional<UUID> & current_user_id = {});
|
||||
void init(const ASTRolesOrUsersSet & ast, const AccessControlManager * manager = nullptr, const std::optional<UUID> & current_user_id = {});
|
||||
};
|
||||
|
||||
}
|
@ -11,22 +11,6 @@ namespace ErrorCodes
|
||||
}
|
||||
|
||||
|
||||
String RowPolicy::NameParts::getName() const
|
||||
{
|
||||
String name;
|
||||
name.reserve(database.length() + table_name.length() + short_name.length() + 6);
|
||||
name += backQuoteIfNeed(short_name);
|
||||
name += " ON ";
|
||||
if (!name.empty())
|
||||
{
|
||||
name += backQuoteIfNeed(database);
|
||||
name += '.';
|
||||
}
|
||||
name += backQuoteIfNeed(table_name);
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
void RowPolicy::setDatabase(const String & database)
|
||||
{
|
||||
name_parts.database = database;
|
||||
|
@ -1,7 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include <Access/IAccessEntity.h>
|
||||
#include <Access/ExtendedRoleSet.h>
|
||||
#include <Access/RolesOrUsersSet.h>
|
||||
#include <array>
|
||||
|
||||
|
||||
@ -23,7 +23,9 @@ struct RowPolicy : public IAccessEntity
|
||||
String database;
|
||||
String table_name;
|
||||
|
||||
bool empty() const { return short_name.empty(); }
|
||||
String getName() const;
|
||||
String toString() const { return getName(); }
|
||||
auto toTuple() const { return std::tie(short_name, database, table_name); }
|
||||
friend bool operator ==(const NameParts & left, const NameParts & right) { return left.toTuple() == right.toTuple(); }
|
||||
friend bool operator !=(const NameParts & left, const NameParts & right) { return left.toTuple() != right.toTuple(); }
|
||||
@ -89,7 +91,7 @@ struct RowPolicy : public IAccessEntity
|
||||
Type getType() const override { return TYPE; }
|
||||
|
||||
/// Which roles or users should use this row policy.
|
||||
ExtendedRoleSet to_roles;
|
||||
RolesOrUsersSet to_roles;
|
||||
|
||||
private:
|
||||
void setName(const String & name_) override;
|
||||
@ -153,4 +155,20 @@ inline String toString(RowPolicy::ConditionType type)
|
||||
return RowPolicy::ConditionTypeInfo::get(type).raw_name;
|
||||
}
|
||||
|
||||
|
||||
inline String RowPolicy::NameParts::getName() const
|
||||
{
|
||||
String name;
|
||||
name.reserve(database.length() + table_name.length() + short_name.length() + 6);
|
||||
name += backQuoteIfNeed(short_name);
|
||||
name += " ON ";
|
||||
if (!database.empty())
|
||||
{
|
||||
name += backQuoteIfNeed(database);
|
||||
name += '.';
|
||||
}
|
||||
name += backQuoteIfNeed(table_name);
|
||||
return name;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -27,7 +27,7 @@ private:
|
||||
void setPolicy(const RowPolicyPtr & policy_);
|
||||
|
||||
RowPolicyPtr policy;
|
||||
const ExtendedRoleSet * roles = nullptr;
|
||||
const RolesOrUsersSet * roles = nullptr;
|
||||
std::shared_ptr<const std::pair<String, String>> database_and_table_name;
|
||||
ASTPtr parsed_conditions[RowPolicy::MAX_CONDITION_TYPE];
|
||||
};
|
||||
|
@ -1,7 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include <Access/IAccessEntity.h>
|
||||
#include <Access/ExtendedRoleSet.h>
|
||||
#include <Access/RolesOrUsersSet.h>
|
||||
#include <Access/SettingsProfileElement.h>
|
||||
|
||||
|
||||
@ -14,7 +14,7 @@ struct SettingsProfile : public IAccessEntity
|
||||
SettingsProfileElements elements;
|
||||
|
||||
/// Which roles or users should use this settings profile.
|
||||
ExtendedRoleSet to_roles;
|
||||
RolesOrUsersSet to_roles;
|
||||
|
||||
bool equal(const IAccessEntity & other) const override;
|
||||
std::shared_ptr<IAccessEntity> clone() const override { return cloneImpl<SettingsProfile>(); }
|
||||
|
@ -5,7 +5,7 @@
|
||||
#include <Access/AllowedClientHosts.h>
|
||||
#include <Access/GrantedAccess.h>
|
||||
#include <Access/GrantedRoles.h>
|
||||
#include <Access/ExtendedRoleSet.h>
|
||||
#include <Access/RolesOrUsersSet.h>
|
||||
#include <Access/SettingsProfileElement.h>
|
||||
|
||||
|
||||
@ -19,7 +19,7 @@ struct User : public IAccessEntity
|
||||
AllowedClientHosts allowed_client_hosts = AllowedClientHosts::AnyHostTag{};
|
||||
GrantedAccess access;
|
||||
GrantedRoles granted_roles;
|
||||
ExtendedRoleSet default_roles = ExtendedRoleSet::AllTag{};
|
||||
RolesOrUsersSet default_roles = RolesOrUsersSet::AllTag{};
|
||||
SettingsProfileElements settings;
|
||||
|
||||
bool equal(const IAccessEntity & other) const override;
|
||||
|
@ -353,16 +353,17 @@ namespace
|
||||
for (const String & name : names)
|
||||
{
|
||||
SettingsProfileElement profile_element;
|
||||
profile_element.setting_index = Settings::findIndexStrict(name);
|
||||
size_t setting_index = Settings::findIndexStrict(name);
|
||||
profile_element.setting_index = setting_index;
|
||||
Poco::Util::AbstractConfiguration::Keys constraint_types;
|
||||
String path_to_name = path_to_constraints + "." + name;
|
||||
config.keys(path_to_name, constraint_types);
|
||||
for (const String & constraint_type : constraint_types)
|
||||
{
|
||||
if (constraint_type == "min")
|
||||
profile_element.min_value = config.getString(path_to_name + "." + constraint_type);
|
||||
profile_element.min_value = Settings::valueToCorrespondingType(setting_index, config.getString(path_to_name + "." + constraint_type));
|
||||
else if (constraint_type == "max")
|
||||
profile_element.max_value = config.getString(path_to_name + "." + constraint_type);
|
||||
profile_element.max_value = Settings::valueToCorrespondingType(setting_index, config.getString(path_to_name + "." + constraint_type));
|
||||
else if (constraint_type == "readonly")
|
||||
profile_element.readonly = true;
|
||||
else
|
||||
@ -402,8 +403,9 @@ namespace
|
||||
}
|
||||
|
||||
SettingsProfileElement profile_element;
|
||||
profile_element.setting_index = Settings::findIndexStrict(key);
|
||||
profile_element.value = config.getString(profile_config + "." + key);
|
||||
size_t setting_index = Settings::findIndexStrict(key);
|
||||
profile_element.setting_index = setting_index;
|
||||
profile_element.value = Settings::valueToCorrespondingType(setting_index, config.getString(profile_config + "." + key));
|
||||
profile->elements.emplace_back(std::move(profile_element));
|
||||
}
|
||||
|
||||
|
@ -17,7 +17,6 @@ SRCS(
|
||||
EnabledRolesInfo.cpp
|
||||
EnabledRowPolicies.cpp
|
||||
EnabledSettings.cpp
|
||||
ExtendedRoleSet.cpp
|
||||
GrantedAccess.cpp
|
||||
GrantedRoles.cpp
|
||||
IAccessEntity.cpp
|
||||
@ -29,6 +28,7 @@ SRCS(
|
||||
QuotaUsage.cpp
|
||||
Role.cpp
|
||||
RoleCache.cpp
|
||||
RolesOrUsersSet.cpp
|
||||
RowPolicy.cpp
|
||||
RowPolicyCache.cpp
|
||||
SettingsConstraints.cpp
|
||||
|
@ -36,7 +36,10 @@ public:
|
||||
}
|
||||
|
||||
AggregateFunctionPtr transformAggregateFunction(
|
||||
const AggregateFunctionPtr & nested_function, const DataTypes & arguments, const Array &) const override
|
||||
const AggregateFunctionPtr & nested_function,
|
||||
const AggregateFunctionProperties &,
|
||||
const DataTypes & arguments,
|
||||
const Array &) const override
|
||||
{
|
||||
return std::make_shared<AggregateFunctionArray>(nested_function, arguments);
|
||||
}
|
||||
|
@ -7,6 +7,12 @@
|
||||
namespace DB
|
||||
{
|
||||
|
||||
AggregateFunctionPtr AggregateFunctionCount::getOwnNullAdapter(
|
||||
const AggregateFunctionPtr &, const DataTypes & types, const Array & params) const
|
||||
{
|
||||
return std::make_shared<AggregateFunctionCountNotNullUnary>(types[0], params);
|
||||
}
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
@ -22,7 +28,7 @@ AggregateFunctionPtr createAggregateFunctionCount(const std::string & name, cons
|
||||
|
||||
void registerAggregateFunctionCount(AggregateFunctionFactory & factory)
|
||||
{
|
||||
factory.registerFunction("count", createAggregateFunctionCount, AggregateFunctionFactory::CaseInsensitive);
|
||||
factory.registerFunction("count", {createAggregateFunctionCount, {true}}, AggregateFunctionFactory::CaseInsensitive);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -68,16 +68,14 @@ public:
|
||||
data(place).count = new_count;
|
||||
}
|
||||
|
||||
/// The function returns non-Nullable type even when wrapped with Null combinator.
|
||||
bool returnDefaultWhenOnlyNull() const override
|
||||
{
|
||||
return true;
|
||||
}
|
||||
AggregateFunctionPtr getOwnNullAdapter(
|
||||
const AggregateFunctionPtr &, const DataTypes & types, const Array & params) const override;
|
||||
};
|
||||
|
||||
|
||||
/// Simply count number of not-NULL values.
|
||||
class AggregateFunctionCountNotNullUnary final : public IAggregateFunctionDataHelper<AggregateFunctionCountData, AggregateFunctionCountNotNullUnary>
|
||||
class AggregateFunctionCountNotNullUnary final
|
||||
: public IAggregateFunctionDataHelper<AggregateFunctionCountData, AggregateFunctionCountNotNullUnary>
|
||||
{
|
||||
public:
|
||||
AggregateFunctionCountNotNullUnary(const DataTypePtr & argument, const Array & params)
|
||||
|
@ -29,18 +29,18 @@ namespace ErrorCodes
|
||||
}
|
||||
|
||||
|
||||
void AggregateFunctionFactory::registerFunction(const String & name, Creator creator, CaseSensitiveness case_sensitiveness)
|
||||
void AggregateFunctionFactory::registerFunction(const String & name, Value creator_with_properties, CaseSensitiveness case_sensitiveness)
|
||||
{
|
||||
if (creator == nullptr)
|
||||
if (creator_with_properties.creator == nullptr)
|
||||
throw Exception("AggregateFunctionFactory: the aggregate function " + name + " has been provided "
|
||||
" a null constructor", ErrorCodes::LOGICAL_ERROR);
|
||||
|
||||
if (!aggregate_functions.emplace(name, creator).second)
|
||||
if (!aggregate_functions.emplace(name, creator_with_properties).second)
|
||||
throw Exception("AggregateFunctionFactory: the aggregate function name '" + name + "' is not unique",
|
||||
ErrorCodes::LOGICAL_ERROR);
|
||||
|
||||
if (case_sensitiveness == CaseInsensitive
|
||||
&& !case_insensitive_aggregate_functions.emplace(Poco::toLower(name), creator).second)
|
||||
&& !case_insensitive_aggregate_functions.emplace(Poco::toLower(name), creator_with_properties).second)
|
||||
throw Exception("AggregateFunctionFactory: the case insensitive aggregate function name '" + name + "' is not unique",
|
||||
ErrorCodes::LOGICAL_ERROR);
|
||||
}
|
||||
@ -59,6 +59,7 @@ AggregateFunctionPtr AggregateFunctionFactory::get(
    const String & name,
    const DataTypes & argument_types,
    const Array & parameters,
    AggregateFunctionProperties & out_properties,
    int recursion_level) const
{
    auto type_without_low_cardinality = convertLowCardinalityTypesToNested(argument_types);
@ -76,18 +77,15 @@ AggregateFunctionPtr AggregateFunctionFactory::get(
        DataTypes nested_types = combinator->transformArguments(type_without_low_cardinality);
        Array nested_parameters = combinator->transformParameters(parameters);

        AggregateFunctionPtr nested_function;
        bool has_null_arguments = std::any_of(type_without_low_cardinality.begin(), type_without_low_cardinality.end(),
            [](const auto & type) { return type->onlyNull(); });

        /// A little hack - if we have NULL arguments, don't even create nested function.
        /// Combinator will check if nested_function was created.
        if (name == "count" || std::none_of(type_without_low_cardinality.begin(), type_without_low_cardinality.end(),
            [](const auto & type) { return type->onlyNull(); }))
            nested_function = getImpl(name, nested_types, nested_parameters, recursion_level);

        return combinator->transformAggregateFunction(nested_function, type_without_low_cardinality, parameters);
        AggregateFunctionPtr nested_function = getImpl(
            name, nested_types, nested_parameters, out_properties, has_null_arguments, recursion_level);
        return combinator->transformAggregateFunction(nested_function, out_properties, type_without_low_cardinality, parameters);
    }

    auto res = getImpl(name, type_without_low_cardinality, parameters, recursion_level);
    auto res = getImpl(name, type_without_low_cardinality, parameters, out_properties, false, recursion_level);
    if (!res)
        throw Exception("Logical error: AggregateFunctionFactory returned nullptr", ErrorCodes::LOGICAL_ERROR);
    return res;
@ -98,19 +96,35 @@ AggregateFunctionPtr AggregateFunctionFactory::getImpl(
    const String & name_param,
    const DataTypes & argument_types,
    const Array & parameters,
    AggregateFunctionProperties & out_properties,
    bool has_null_arguments,
    int recursion_level) const
{
    String name = getAliasToOrName(name_param);
    Value found;

    /// Find by exact match.
    if (auto it = aggregate_functions.find(name); it != aggregate_functions.end())
        return it->second(name, argument_types, parameters);

    {
        found = it->second;
    }
    /// Find by case-insensitive name.
    /// Combinators cannot apply for case insensitive (SQL-style) aggregate function names. Only for native names.
    if (recursion_level == 0)
    else if (recursion_level == 0)
    {
        if (auto it = case_insensitive_aggregate_functions.find(Poco::toLower(name)); it != case_insensitive_aggregate_functions.end())
            return it->second(name, argument_types, parameters);
        if (auto jt = case_insensitive_aggregate_functions.find(Poco::toLower(name)); jt != case_insensitive_aggregate_functions.end())
            found = jt->second;
    }

    if (found.creator)
    {
        out_properties = found.properties;

        /// The case when aggregate function should return NULL on NULL arguments. This case is handled in "get" method.
        if (!out_properties.returns_default_when_only_null && has_null_arguments)
            return nullptr;

        return found.creator(name, argument_types, parameters);
    }

    /// Combinators of aggregate functions.
@ -126,9 +140,8 @@ AggregateFunctionPtr AggregateFunctionFactory::getImpl(
        DataTypes nested_types = combinator->transformArguments(argument_types);
        Array nested_parameters = combinator->transformParameters(parameters);

        AggregateFunctionPtr nested_function = get(nested_name, nested_types, nested_parameters, recursion_level + 1);

        return combinator->transformAggregateFunction(nested_function, argument_types, parameters);
        AggregateFunctionPtr nested_function = get(nested_name, nested_types, nested_parameters, out_properties, recursion_level + 1);
        return combinator->transformAggregateFunction(nested_function, out_properties, argument_types, parameters);
    }

    auto hints = this->getHints(name);
@ -140,10 +153,11 @@ AggregateFunctionPtr AggregateFunctionFactory::getImpl(
}


AggregateFunctionPtr AggregateFunctionFactory::tryGet(const String & name, const DataTypes & argument_types, const Array & parameters) const
AggregateFunctionPtr AggregateFunctionFactory::tryGet(
    const String & name, const DataTypes & argument_types, const Array & parameters, AggregateFunctionProperties & out_properties) const
{
    return isAggregateFunctionName(name)
        ? get(name, argument_types, parameters)
        ? get(name, argument_types, parameters, out_properties)
        : nullptr;
}
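Editor's note — a minimal usage sketch, not part of this commit, showing how a caller is expected to use the reworked lookup path: get() now takes an AggregateFunctionProperties out-parameter that the factory fills from the registered entry. The include paths and the DataTypeUInt64 argument are assumptions made only for illustration.

    #include <AggregateFunctions/AggregateFunctionFactory.h>  // include paths assumed
    #include <DataTypes/DataTypesNumber.h>

    void lookupExample()
    {
        using namespace DB;

        AggregateFunctionProperties properties;
        AggregateFunctionPtr func = AggregateFunctionFactory::instance().get(
            "count", {std::make_shared<DataTypeUInt64>()}, /* parameters = */ {}, properties);

        // The factory fills `properties` from the registered entry. Per this commit, functions
        // such as uniq (and, per the comment in AggregateFunctionNull.h below, count) carry
        // returns_default_when_only_null = true, which the Null combinator now consults
        // instead of the removed virtual returnDefaultWhenOnlyNull().
    }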
@ -26,34 +26,51 @@ using DataTypes = std::vector<DataTypePtr>;
  */
using AggregateFunctionCreator = std::function<AggregateFunctionPtr(const String &, const DataTypes &, const Array &)>;

struct AggregateFunctionWithProperties
{
    AggregateFunctionCreator creator;
    AggregateFunctionProperties properties;

    AggregateFunctionWithProperties() = default;
    AggregateFunctionWithProperties(const AggregateFunctionWithProperties &) = default;
    AggregateFunctionWithProperties & operator = (const AggregateFunctionWithProperties &) = default;

    template <typename Creator, std::enable_if_t<!std::is_same_v<Creator, AggregateFunctionWithProperties>> * = nullptr>
    AggregateFunctionWithProperties(Creator creator_, AggregateFunctionProperties properties_ = {})
        : creator(std::forward<Creator>(creator_)), properties(std::move(properties_))
    {
    }
};


/** Creates an aggregate function by name.
  */
class AggregateFunctionFactory final : private boost::noncopyable, public IFactoryWithAliases<AggregateFunctionCreator>
class AggregateFunctionFactory final : private boost::noncopyable, public IFactoryWithAliases<AggregateFunctionWithProperties>
{
public:

    static AggregateFunctionFactory & instance();

    /// Register a function by its name.
    /// No locking, you must register all functions before usage of get.
    void registerFunction(
        const String & name,
        Creator creator,
        Value creator,
        CaseSensitiveness case_sensitiveness = CaseSensitive);

    /// Throws an exception if not found.
    AggregateFunctionPtr get(
        const String & name,
        const DataTypes & argument_types,
        const Array & parameters = {},
        const Array & parameters,
        AggregateFunctionProperties & out_properties,
        int recursion_level = 0) const;

    /// Returns nullptr if not found.
    AggregateFunctionPtr tryGet(
        const String & name,
        const DataTypes & argument_types,
        const Array & parameters = {}) const;
        const Array & parameters,
        AggregateFunctionProperties & out_properties) const;

    bool isAggregateFunctionName(const String & name, int recursion_level = 0) const;
@ -62,19 +79,21 @@ private:
        const String & name,
        const DataTypes & argument_types,
        const Array & parameters,
        AggregateFunctionProperties & out_properties,
        bool has_null_arguments,
        int recursion_level) const;

private:
    using AggregateFunctions = std::unordered_map<String, Creator>;
    using AggregateFunctions = std::unordered_map<String, Value>;

    AggregateFunctions aggregate_functions;

    /// Case insensitive aggregate functions will be additionally added here with lowercased name.
    AggregateFunctions case_insensitive_aggregate_functions;

    const AggregateFunctions & getCreatorMap() const override { return aggregate_functions; }
    const AggregateFunctions & getMap() const override { return aggregate_functions; }

    const AggregateFunctions & getCaseInsensitiveCreatorMap() const override { return case_insensitive_aggregate_functions; }
    const AggregateFunctions & getCaseInsensitiveMap() const override { return case_insensitive_aggregate_functions; }

    String getFactoryName() const override { return "AggregateFunctionFactory"; }
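Editor's note — a hedged sketch, not part of this commit, of what registration looks like against the new IFactoryWithAliases<AggregateFunctionWithProperties> base. createAggregateFunctionExample is a hypothetical creator used only for illustration; the brace form mirrors the uniq/uniqUpTo registrations that appear later in this diff.

    // Hypothetical creator with the AggregateFunctionCreator signature (illustration only).
    DB::AggregateFunctionPtr createAggregateFunctionExample(
        const std::string & name, const DB::DataTypes & argument_types, const DB::Array & parameters);

    void registerExampleFunctions(DB::AggregateFunctionFactory & factory)
    {
        // A plain creator still works: the converting constructor wraps it into
        // AggregateFunctionWithProperties with default properties.
        factory.registerFunction("example", createAggregateFunctionExample);

        // Creator plus properties: mark the function as returning a default (non-NULL) value
        // when it aggregates nothing but NULLs.
        factory.registerFunction("exampleDistinct",
            {createAggregateFunctionExample, {true /* returns_default_when_only_null */}});
    }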
@ -33,7 +33,10 @@ public:
    }

    AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function, const DataTypes & arguments, const Array &) const override
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties &,
        const DataTypes & arguments,
        const Array &) const override
    {
        return std::make_shared<AggregateFunctionForEach>(nested_function, arguments);
    }
@ -31,7 +31,10 @@ public:
    }

    AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function, const DataTypes & arguments, const Array &) const override
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties &,
        const DataTypes & arguments,
        const Array &) const override
    {
        return std::make_shared<AggregateFunctionIf>(nested_function, arguments);
    }
@ -34,7 +34,10 @@ public:
    }

    AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function, const DataTypes & arguments, const Array &) const override
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties &,
        const DataTypes & arguments,
        const Array &) const override
    {
        const DataTypePtr & argument = arguments[0];

@ -25,7 +25,7 @@ public:

    DataTypePtr getReturnType() const override
    {
        return std::make_shared<DataTypeNullable>(std::make_shared<DataTypeNothing>());
        return argument_types.front();
    }

    void create(AggregateDataPtr) const override
@ -31,13 +31,11 @@ public:
    }

    AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function, const DataTypes & arguments, const Array & params) const override
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties & properties,
        const DataTypes & arguments,
        const Array & params) const override
    {
        /// Special case for 'count' function. It could be called with Nullable arguments
        /// - that means - count number of calls, when all arguments are not NULL.
        if (nested_function && nested_function->getName() == "count")
            return std::make_shared<AggregateFunctionCountNotNullUnary>(arguments[0], params);

        bool has_nullable_types = false;
        bool has_null_types = false;
        for (const auto & arg_type : arguments)
@ -58,15 +56,23 @@ public:
                ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);

        if (has_null_types)
            return std::make_shared<AggregateFunctionNothing>(arguments, params);
        {
            /// Currently the only functions that return non-NULL on all-NULL arguments are count and uniq, and they return UInt64.
            if (properties.returns_default_when_only_null)
                return std::make_shared<AggregateFunctionNothing>(DataTypes{
                    std::make_shared<DataTypeUInt64>()}, params);
            else
                return std::make_shared<AggregateFunctionNothing>(DataTypes{
                    std::make_shared<DataTypeNullable>(std::make_shared<DataTypeNothing>())}, params);
        }

        assert(nested_function);

        if (auto adapter = nested_function->getOwnNullAdapter(nested_function, arguments, params))
            return adapter;

        bool return_type_is_nullable = !nested_function->returnDefaultWhenOnlyNull() && nested_function->getReturnType()->canBeInsideNullable();
        bool serialize_flag = return_type_is_nullable || nested_function->returnDefaultWhenOnlyNull();
        bool return_type_is_nullable = !properties.returns_default_when_only_null && nested_function->getReturnType()->canBeInsideNullable();
        bool serialize_flag = return_type_is_nullable || properties.returns_default_when_only_null;

        if (arguments.size() == 1)
        {
@ -21,6 +21,7 @@ public:

    AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties &,
        const DataTypes & arguments,
        const Array & params) const override
    {
@ -43,6 +43,7 @@ public:

    AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties &,
        const DataTypes & arguments,
        const Array & params) const override
    {
@ -24,7 +24,10 @@ public:
    }

    AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function, const DataTypes & arguments, const Array & params) const override
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties &,
        const DataTypes & arguments,
        const Array & params) const override
    {
        return std::make_shared<AggregateFunctionState>(nested_function, arguments, params);
    }
@ -18,21 +18,6 @@ namespace ErrorCodes
namespace
{

template <bool overflow, bool tuple_argument>
struct SumMap
{
    template <typename T>
    using F = AggregateFunctionSumMap<T, overflow, tuple_argument>;
};

template <bool overflow, bool tuple_argument>
struct SumMapFiltered
{
    template <typename T>
    using F = AggregateFunctionSumMapFiltered<T, overflow, tuple_argument>;
};


auto parseArguments(const std::string & name, const DataTypes & arguments)
{
    DataTypes args;
@ -85,30 +70,32 @@ auto parseArguments(const std::string & name, const DataTypes & arguments)
        tuple_argument};
}

template <bool overflow>
AggregateFunctionPtr createAggregateFunctionSumMap(const std::string & name, const DataTypes & arguments, const Array & params)
// This function instantiates a particular overload of the sumMap family of
// functions.
// The template parameter MappedFunction<bool template_argument> is an aggregate
// function template that allows to choose the aggregate function variant that
// accepts either normal arguments or a tuple argument.
template<template <bool tuple_argument> typename MappedFunction>
AggregateFunctionPtr createAggregateFunctionMap(const std::string & name, const DataTypes & arguments, const Array & params)
{
    assertNoParameters(name, params);

    auto [keys_type, values_types, tuple_argument] = parseArguments(name,
        arguments);
    auto [keys_type, values_types, tuple_argument] = parseArguments(name, arguments);

    AggregateFunctionPtr res;
    if (tuple_argument)
    {
        res.reset(createWithNumericBasedType<SumMap<overflow, true>::template F>(*keys_type, keys_type, values_types, arguments));
        res.reset(createWithNumericBasedType<MappedFunction<true>::template F>(*keys_type, keys_type, values_types, arguments, params));
        if (!res)
            res.reset(createWithDecimalType<SumMap<overflow, true>::template F>(*keys_type, keys_type, values_types, arguments));
            res.reset(createWithDecimalType<MappedFunction<true>::template F>(*keys_type, keys_type, values_types, arguments, params));
        if (!res)
            res.reset(createWithStringType<SumMap<overflow, true>::template F>(*keys_type, keys_type, values_types, arguments));
            res.reset(createWithStringType<MappedFunction<true>::template F>(*keys_type, keys_type, values_types, arguments, params));
    }
    else
    {
        res.reset(createWithNumericBasedType<SumMap<overflow, false>::template F>(*keys_type, keys_type, values_types, arguments));
        res.reset(createWithNumericBasedType<MappedFunction<false>::template F>(*keys_type, keys_type, values_types, arguments, params));
        if (!res)
            res.reset(createWithDecimalType<SumMap<overflow, false>::template F>(*keys_type, keys_type, values_types, arguments));
            res.reset(createWithDecimalType<MappedFunction<false>::template F>(*keys_type, keys_type, values_types, arguments, params));
        if (!res)
            res.reset(createWithStringType<SumMap<overflow, false>::template F>(*keys_type, keys_type, values_types, arguments));
            res.reset(createWithStringType<MappedFunction<false>::template F>(*keys_type, keys_type, values_types, arguments, params));
    }
    if (!res)
        throw Exception("Illegal type of argument for aggregate function " + name, ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
@ -116,52 +103,66 @@ AggregateFunctionPtr createAggregateFunctionSumMap(const std::string & name, con
    return res;
}

template <bool overflow>
AggregateFunctionPtr createAggregateFunctionSumMapFiltered(const std::string & name, const DataTypes & arguments, const Array & params)
// This template chooses the sumMap variant with given filtering and overflow
// handling.
template <bool filtered, bool overflow>
struct SumMapVariants
{
    if (params.size() != 1)
        throw Exception("Aggregate function " + name + " requires exactly one parameter of Array type.",
            ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);

    Array keys_to_keep;
    if (!params.front().tryGet<Array>(keys_to_keep))
        throw Exception("Aggregate function " + name + " requires an Array as parameter.",
            ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);

    auto [keys_type, values_types, tuple_argument] = parseArguments(name,
        arguments);

    AggregateFunctionPtr res;
    if (tuple_argument)
    // SumMapVariants chooses the `overflow` and `filtered` parameters of the
    // aggregate functions. The `tuple_argument` and the value type `T` are left
    // as free parameters.
    // DispatchOnTupleArgument chooses `tuple_argument`, and the value type `T`
    // is left free.
    template <bool tuple_argument>
    struct DispatchOnTupleArgument
    {
        res.reset(createWithNumericBasedType<SumMapFiltered<overflow, true>::template F>(*keys_type, keys_type, values_types, keys_to_keep, arguments, params));
        if (!res)
            res.reset(createWithDecimalType<SumMapFiltered<overflow, true>::template F>(*keys_type, keys_type, values_types, keys_to_keep, arguments, params));
        if (!res)
            res.reset(createWithStringType<SumMapFiltered<overflow, true>::template F>(*keys_type, keys_type, values_types, keys_to_keep, arguments, params));
    }
    else
    {
        res.reset(createWithNumericBasedType<SumMapFiltered<overflow, false>::template F>(*keys_type, keys_type, values_types, keys_to_keep, arguments, params));
        if (!res)
            res.reset(createWithDecimalType<SumMapFiltered<overflow, false>::template F>(*keys_type, keys_type, values_types, keys_to_keep, arguments, params));
        if (!res)
            res.reset(createWithStringType<SumMapFiltered<overflow, false>::template F>(*keys_type, keys_type, values_types, keys_to_keep, arguments, params));
    }
    if (!res)
        throw Exception("Illegal type of argument for aggregate function " + name, ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
        template <typename T>
        using F = std::conditional_t<filtered,
            AggregateFunctionSumMapFiltered<T, overflow, tuple_argument>,
            AggregateFunctionSumMap<T, overflow, tuple_argument>>;
    };
};

    return res;
}
// This template gives an aggregate function template that is narrowed
// to accept either a tuple argument or normal arguments.
template <bool tuple_argument>
struct MinMapDispatchOnTupleArgument
{
    template <typename T>
    using F = AggregateFunctionMinMap<T, tuple_argument>;
};

// This template gives an aggregate function template that is narrowed
// to accept either a tuple argument or normal arguments.
template <bool tuple_argument>
struct MaxMapDispatchOnTupleArgument
{
    template <typename T>
    using F = AggregateFunctionMaxMap<T, tuple_argument>;
};

}

void registerAggregateFunctionSumMap(AggregateFunctionFactory & factory)
{
    factory.registerFunction("sumMap", createAggregateFunctionSumMap<false /*overflow*/>);
    factory.registerFunction("sumMapWithOverflow", createAggregateFunctionSumMap<true /*overflow*/>);
    factory.registerFunction("sumMapFiltered", createAggregateFunctionSumMapFiltered<false /*overflow*/>);
    factory.registerFunction("sumMapFilteredWithOverflow", createAggregateFunctionSumMapFiltered<true /*overflow*/>);
    factory.registerFunction("sumMap", createAggregateFunctionMap<
        SumMapVariants<false, false>::DispatchOnTupleArgument>);

    factory.registerFunction("sumMapWithOverflow", createAggregateFunctionMap<
        SumMapVariants<false, true>::DispatchOnTupleArgument>);

    factory.registerFunction("sumMapFiltered", createAggregateFunctionMap<
        SumMapVariants<true, false>::DispatchOnTupleArgument>);

    factory.registerFunction("sumMapFilteredWithOverflow",
        createAggregateFunctionMap<
            SumMapVariants<true, true>::DispatchOnTupleArgument>);

    factory.registerFunction("minMap",
        createAggregateFunctionMap<MinMapDispatchOnTupleArgument>);

    factory.registerFunction("maxMap",
        createAggregateFunctionMap<MaxMapDispatchOnTupleArgument>);
}

}
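Editor's note — a hedged illustration, not part of the commit, spelling out what the nested dispatch in the registrations above resolves to. These asserts assume they live in the same translation unit (the structs sit in an unnamed namespace), and UInt64/Float64 are the DB typedefs.

    // SumMapVariants<filtered, overflow>::DispatchOnTupleArgument<tuple_argument>::F<T>
    // names the concrete aggregate function class:
    static_assert(std::is_same_v<
        SumMapVariants<true, false>::DispatchOnTupleArgument<false>::F<UInt64>,
        AggregateFunctionSumMapFiltered<UInt64, false, false>>);

    static_assert(std::is_same_v<
        SumMapVariants<false, true>::DispatchOnTupleArgument<true>::F<Float64>,
        AggregateFunctionSumMap<Float64, true, true>>);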
@ -25,19 +25,20 @@ namespace ErrorCodes
|
||||
{
|
||||
extern const int BAD_ARGUMENTS;
|
||||
extern const int ILLEGAL_TYPE_OF_ARGUMENT;
|
||||
extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
struct AggregateFunctionSumMapData
|
||||
struct AggregateFunctionMapData
|
||||
{
|
||||
// Map needs to be ordered to maintain function properties
|
||||
std::map<T, Array> merged_maps;
|
||||
};
|
||||
|
||||
/** Aggregate function, that takes at least two arguments: keys and values, and as a result, builds a tuple of of at least 2 arrays -
|
||||
* ordered keys and variable number of argument values summed up by corresponding keys.
|
||||
* ordered keys and variable number of argument values aggregated by corresponding keys.
|
||||
*
|
||||
* This function is the most useful when using SummingMergeTree to sum Nested columns, which name ends in "Map".
|
||||
* sumMap function is the most useful when using SummingMergeTree to sum Nested columns, which name ends in "Map".
|
||||
*
|
||||
* Example: sumMap(k, v...) of:
|
||||
* k v
|
||||
@ -49,24 +50,27 @@ struct AggregateFunctionSumMapData
|
||||
* [8,9,10] [20,20,20]
|
||||
* will return:
|
||||
* ([1,2,3,4,5,6,7,8,9,10],[10,10,45,20,35,20,15,30,20,20])
|
||||
*
|
||||
* minMap and maxMap share the same idea, but calculate min and max correspondingly.
|
||||
*/
|
||||
|
||||
template <typename T, typename Derived, bool overflow, bool tuple_argument>
|
||||
class AggregateFunctionSumMapBase : public IAggregateFunctionDataHelper<
|
||||
AggregateFunctionSumMapData<NearestFieldType<T>>, Derived>
|
||||
template <typename T, typename Derived, typename Visitor, bool overflow, bool tuple_argument>
|
||||
class AggregateFunctionMapBase : public IAggregateFunctionDataHelper<
|
||||
AggregateFunctionMapData<NearestFieldType<T>>, Derived>
|
||||
{
|
||||
private:
|
||||
DataTypePtr keys_type;
|
||||
DataTypes values_types;
|
||||
|
||||
public:
|
||||
AggregateFunctionSumMapBase(
|
||||
const DataTypePtr & keys_type_, const DataTypes & values_types_,
|
||||
const DataTypes & argument_types_, const Array & params_)
|
||||
: IAggregateFunctionDataHelper<AggregateFunctionSumMapData<NearestFieldType<T>>, Derived>(argument_types_, params_)
|
||||
, keys_type(keys_type_), values_types(values_types_) {}
|
||||
using Base = IAggregateFunctionDataHelper<
|
||||
AggregateFunctionMapData<NearestFieldType<T>>, Derived>;
|
||||
|
||||
String getName() const override { return "sumMap"; }
|
||||
AggregateFunctionMapBase(const DataTypePtr & keys_type_,
|
||||
const DataTypes & values_types_, const DataTypes & argument_types_)
|
||||
: Base(argument_types_, {} /* parameters */), keys_type(keys_type_),
|
||||
values_types(values_types_)
|
||||
{}
|
||||
|
||||
DataTypePtr getReturnType() const override
|
||||
{
|
||||
@ -88,7 +92,7 @@ public:
|
||||
// No overflow, meaning we promote the types if necessary.
|
||||
if (!value_type->canBePromoted())
|
||||
{
|
||||
throw Exception{"Values to be summed are expected to be Numeric, Float or Decimal.", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT};
|
||||
throw Exception{"Values for " + getName() + " are expected to be Numeric, Float or Decimal.", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT};
|
||||
}
|
||||
|
||||
result_type = value_type->promoteNumericType();
|
||||
@ -161,7 +165,7 @@ public:
|
||||
|
||||
if (it != merged_maps.end())
|
||||
{
|
||||
applyVisitor(FieldVisitorSum(value), it->second[col]);
|
||||
applyVisitor(Visitor(value), it->second[col]);
|
||||
}
|
||||
else
|
||||
{
|
||||
@ -198,7 +202,7 @@ public:
|
||||
if (it != merged_maps.end())
|
||||
{
|
||||
for (size_t col = 0; col < values_types.size(); ++col)
|
||||
applyVisitor(FieldVisitorSum(elem.second[col]), it->second[col]);
|
||||
applyVisitor(Visitor(elem.second[col]), it->second[col]);
|
||||
}
|
||||
else
|
||||
merged_maps[elem.first] = elem.second;
|
||||
@ -300,20 +304,27 @@ public:
|
||||
}
|
||||
|
||||
bool keepKey(const T & key) const { return static_cast<const Derived &>(*this).keepKey(key); }
|
||||
String getName() const override { return static_cast<const Derived &>(*this).getName(); }
|
||||
};
|
||||
|
||||
template <typename T, bool overflow, bool tuple_argument>
|
||||
class AggregateFunctionSumMap final :
|
||||
public AggregateFunctionSumMapBase<T, AggregateFunctionSumMap<T, overflow, tuple_argument>, overflow, tuple_argument>
|
||||
public AggregateFunctionMapBase<T, AggregateFunctionSumMap<T, overflow, tuple_argument>, FieldVisitorSum, overflow, tuple_argument>
|
||||
{
|
||||
private:
|
||||
using Self = AggregateFunctionSumMap<T, overflow, tuple_argument>;
|
||||
using Base = AggregateFunctionSumMapBase<T, Self, overflow, tuple_argument>;
|
||||
using Base = AggregateFunctionMapBase<T, Self, FieldVisitorSum, overflow, tuple_argument>;
|
||||
|
||||
public:
|
||||
AggregateFunctionSumMap(const DataTypePtr & keys_type_, DataTypes & values_types_, const DataTypes & argument_types_)
|
||||
: Base{keys_type_, values_types_, argument_types_, {}}
|
||||
{}
|
||||
AggregateFunctionSumMap(const DataTypePtr & keys_type_,
|
||||
DataTypes & values_types_, const DataTypes & argument_types_,
|
||||
const Array & params_)
|
||||
: Base{keys_type_, values_types_, argument_types_}
|
||||
{
|
||||
// The constructor accepts parameters to have a uniform interface with
|
||||
// sumMapFiltered, but this function doesn't have any parameters.
|
||||
assertNoParameters(getName(), params_);
|
||||
}
|
||||
|
||||
String getName() const override { return "sumMap"; }
|
||||
|
||||
@ -322,23 +333,35 @@ public:
|
||||
|
||||
template <typename T, bool overflow, bool tuple_argument>
|
||||
class AggregateFunctionSumMapFiltered final :
|
||||
public AggregateFunctionSumMapBase<T,
|
||||
public AggregateFunctionMapBase<T,
|
||||
AggregateFunctionSumMapFiltered<T, overflow, tuple_argument>,
|
||||
FieldVisitorSum,
|
||||
overflow,
|
||||
tuple_argument>
|
||||
{
|
||||
private:
|
||||
using Self = AggregateFunctionSumMapFiltered<T, overflow, tuple_argument>;
|
||||
using Base = AggregateFunctionSumMapBase<T, Self, overflow, tuple_argument>;
|
||||
using Base = AggregateFunctionMapBase<T, Self, FieldVisitorSum, overflow, tuple_argument>;
|
||||
|
||||
std::unordered_set<T> keys_to_keep;
|
||||
|
||||
public:
|
||||
AggregateFunctionSumMapFiltered(
|
||||
const DataTypePtr & keys_type_, const DataTypes & values_types_, const Array & keys_to_keep_,
|
||||
const DataTypes & argument_types_, const Array & params_)
|
||||
: Base{keys_type_, values_types_, argument_types_, params_}
|
||||
AggregateFunctionSumMapFiltered(const DataTypePtr & keys_type_,
|
||||
const DataTypes & values_types_, const DataTypes & argument_types_,
|
||||
const Array & params_)
|
||||
: Base{keys_type_, values_types_, argument_types_}
|
||||
{
|
||||
if (params_.size() != 1)
|
||||
throw Exception(ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH,
|
||||
"Aggregate function '{}' requires exactly one parameter "
|
||||
"of Array type", getName());
|
||||
|
||||
Array keys_to_keep_;
|
||||
if (!params_.front().tryGet<Array>(keys_to_keep_))
|
||||
throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT,
|
||||
"Aggregate function {} requires an Array as a parameter",
|
||||
getName());
|
||||
|
||||
keys_to_keep.reserve(keys_to_keep_.size());
|
||||
for (const Field & f : keys_to_keep_)
|
||||
{
|
||||
@ -346,9 +369,58 @@ public:
|
||||
}
|
||||
}
|
||||
|
||||
String getName() const override { return "sumMapFiltered"; }
|
||||
String getName() const override
|
||||
{ return overflow ? "sumMapFilteredWithOverflow" : "sumMapFiltered"; }
|
||||
|
||||
bool keepKey(const T & key) const { return keys_to_keep.count(key); }
|
||||
};
|
||||
|
||||
template <typename T, bool tuple_argument>
|
||||
class AggregateFunctionMinMap final :
|
||||
public AggregateFunctionMapBase<T, AggregateFunctionMinMap<T, tuple_argument>, FieldVisitorMin, true, tuple_argument>
|
||||
{
|
||||
private:
|
||||
using Self = AggregateFunctionMinMap<T, tuple_argument>;
|
||||
using Base = AggregateFunctionMapBase<T, Self, FieldVisitorMin, true, tuple_argument>;
|
||||
|
||||
public:
|
||||
AggregateFunctionMinMap(const DataTypePtr & keys_type_,
|
||||
DataTypes & values_types_, const DataTypes & argument_types_,
|
||||
const Array & params_)
|
||||
: Base{keys_type_, values_types_, argument_types_}
|
||||
{
|
||||
// The constructor accepts parameters to have a uniform interface with
|
||||
// sumMapFiltered, but this function doesn't have any parameters.
|
||||
assertNoParameters(getName(), params_);
|
||||
}
|
||||
|
||||
String getName() const override { return "minMap"; }
|
||||
|
||||
bool keepKey(const T &) const { return true; }
|
||||
};
|
||||
|
||||
template <typename T, bool tuple_argument>
|
||||
class AggregateFunctionMaxMap final :
|
||||
public AggregateFunctionMapBase<T, AggregateFunctionMaxMap<T, tuple_argument>, FieldVisitorMax, true, tuple_argument>
|
||||
{
|
||||
private:
|
||||
using Self = AggregateFunctionMaxMap<T, tuple_argument>;
|
||||
using Base = AggregateFunctionMapBase<T, Self, FieldVisitorMax, true, tuple_argument>;
|
||||
|
||||
public:
|
||||
AggregateFunctionMaxMap(const DataTypePtr & keys_type_,
|
||||
DataTypes & values_types_, const DataTypes & argument_types_,
|
||||
const Array & params_)
|
||||
: Base{keys_type_, values_types_, argument_types_}
|
||||
{
|
||||
// The constructor accepts parameters to have a uniform interface with
|
||||
// sumMapFiltered, but this function doesn't have any parameters.
|
||||
assertNoParameters(getName(), params_);
|
||||
}
|
||||
|
||||
String getName() const override { return "maxMap"; }
|
||||
|
||||
bool keepKey(const T &) const { return true; }
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -123,13 +123,13 @@ AggregateFunctionPtr createAggregateFunctionUniq(const std::string & name, const
void registerAggregateFunctionsUniq(AggregateFunctionFactory & factory)
{
    factory.registerFunction("uniq",
        createAggregateFunctionUniq<AggregateFunctionUniqUniquesHashSetData, AggregateFunctionUniqUniquesHashSetDataForVariadic>);
        {createAggregateFunctionUniq<AggregateFunctionUniqUniquesHashSetData, AggregateFunctionUniqUniquesHashSetDataForVariadic>, {true}});

    factory.registerFunction("uniqHLL12",
        createAggregateFunctionUniq<false, AggregateFunctionUniqHLL12Data, AggregateFunctionUniqHLL12DataForVariadic>);
        {createAggregateFunctionUniq<false, AggregateFunctionUniqHLL12Data, AggregateFunctionUniqHLL12DataForVariadic>, {true}});

    factory.registerFunction("uniqExact",
        createAggregateFunctionUniq<true, AggregateFunctionUniqExactData, AggregateFunctionUniqExactData<String>>);
        {createAggregateFunctionUniq<true, AggregateFunctionUniqExactData, AggregateFunctionUniqExactData<String>>, {true}});
}

}
@ -244,12 +244,6 @@ public:
    {
        assert_cast<ColumnUInt64 &>(to).getData().push_back(this->data(place).set.size());
    }

    /// The function returns non-Nullable type even when wrapped with Null combinator.
    bool returnDefaultWhenOnlyNull() const override
    {
        return true;
    }
};


@ -304,12 +298,6 @@ public:
    {
        assert_cast<ColumnUInt64 &>(to).getData().push_back(this->data(place).set.size());
    }

    /// The function returns non-Nullable type even when wrapped with Null combinator.
    bool returnDefaultWhenOnlyNull() const override
    {
        return true;
    }
};

}
@ -85,7 +85,7 @@ AggregateFunctionPtr createAggregateFunctionUniqUpTo(const std::string & name, c

void registerAggregateFunctionUniqUpTo(AggregateFunctionFactory & factory)
{
    factory.registerFunction("uniqUpTo", createAggregateFunctionUniqUpTo);
    factory.registerFunction("uniqUpTo", {createAggregateFunctionUniqUpTo, {true}});
}

}
@ -166,17 +166,12 @@ public:
      * nested_function is a smart pointer to this aggregate function itself.
      * arguments and params are for nested_function.
      */
    virtual AggregateFunctionPtr getOwnNullAdapter(const AggregateFunctionPtr & /*nested_function*/, const DataTypes & /*arguments*/, const Array & /*params*/) const
    virtual AggregateFunctionPtr getOwnNullAdapter(
        const AggregateFunctionPtr & /*nested_function*/, const DataTypes & /*arguments*/, const Array & /*params*/) const
    {
        return nullptr;
    }

    /** When the function is wrapped with Null combinator,
      * should we return Nullable type with NULL when no values were aggregated
      * or we should return non-Nullable type with default value (example: count, countDistinct).
      */
    virtual bool returnDefaultWhenOnlyNull() const { return false; }

    const DataTypes & getArgumentTypes() const { return argument_types; }
    const Array & getParameters() const { return parameters; }

@ -286,4 +281,15 @@ public:
};


/// Properties of aggregate function that are independent of argument types and parameters.
struct AggregateFunctionProperties
{
    /** When the function is wrapped with Null combinator,
      * should we return Nullable type with NULL when no values were aggregated
      * or we should return non-Nullable type with default value (example: count, countDistinct).
      */
    bool returns_default_when_only_null = false;
};


}
@ -59,6 +59,7 @@ public:
      */
    virtual AggregateFunctionPtr transformAggregateFunction(
        const AggregateFunctionPtr & nested_function,
        const AggregateFunctionProperties & properties,
        const DataTypes & arguments,
        const Array & params) const = 0;

@ -381,6 +381,6 @@ if (ENABLE_TESTS AND USE_GTEST)
        -Wno-gnu-zero-variadic-macro-arguments
    )

    target_link_libraries(unit_tests_dbms PRIVATE ${GTEST_BOTH_LIBRARIES} clickhouse_functions clickhouse_parsers dbms clickhouse_common_zookeeper string_utils)
    target_link_libraries(unit_tests_dbms PRIVATE ${GTEST_BOTH_LIBRARIES} clickhouse_functions clickhouse_aggregate_functions clickhouse_parsers dbms clickhouse_common_zookeeper string_utils)
    add_check(unit_tests_dbms)
endif ()
@ -11,7 +11,6 @@
|
||||
|
||||
#include <IO/ConnectionTimeouts.h>
|
||||
|
||||
|
||||
namespace ProfileEvents
|
||||
{
|
||||
extern const Event DistributedConnectionMissingTable;
|
||||
@ -71,6 +70,18 @@ IConnectionPool::Entry ConnectionPoolWithFailover::get(const ConnectionTimeouts
|
||||
case LoadBalancing::FIRST_OR_RANDOM:
|
||||
get_priority = [](size_t i) -> size_t { return i >= 1; };
|
||||
break;
|
||||
case LoadBalancing::ROUND_ROBIN:
|
||||
if (last_used >= nested_pools.size())
|
||||
last_used = 0;
|
||||
++last_used;
|
||||
/* Consider nested_pools.size() equals to 5
|
||||
* last_used = 1 -> get_priority: 0 1 2 3 4
|
||||
* last_used = 2 -> get_priority: 5 0 1 2 3
|
||||
* last_used = 3 -> get_priority: 5 4 0 1 2
|
||||
* ...
|
||||
* */
|
||||
get_priority = [&](size_t i) { ++i; return i < last_used ? nested_pools.size() - i : i - last_used; };
|
||||
break;
|
||||
}
|
||||
|
||||
return Base::get(try_get_entry, get_priority);
|
||||
@ -181,6 +192,18 @@ std::vector<ConnectionPoolWithFailover::TryResult> ConnectionPoolWithFailover::g
|
||||
case LoadBalancing::FIRST_OR_RANDOM:
|
||||
get_priority = [](size_t i) -> size_t { return i >= 1; };
|
||||
break;
|
||||
case LoadBalancing::ROUND_ROBIN:
|
||||
if (last_used >= nested_pools.size())
|
||||
last_used = 0;
|
||||
++last_used;
|
||||
/* Consider nested_pools.size() equals to 5
|
||||
* last_used = 1 -> get_priority: 0 1 2 3 4
|
||||
* last_used = 2 -> get_priority: 5 0 1 2 3
|
||||
* last_used = 3 -> get_priority: 5 4 0 1 2
|
||||
* ...
|
||||
* */
|
||||
get_priority = [&](size_t i) { ++i; return i < last_used ? nested_pools.size() - i : i - last_used; };
|
||||
break;
|
||||
}
|
||||
|
||||
bool fallback_to_stale_replicas = settings ? bool(settings->fallback_to_stale_replicas_for_distributed_queries) : true;
|
||||
|
@ -97,6 +97,7 @@ private:
|
||||
|
||||
private:
|
||||
std::vector<size_t> hostname_differences; /// Distances from name of this host to the names of hosts of pools.
|
||||
size_t last_used = 0; /// Last used for round_robin policy.
|
||||
LoadBalancing default_load_balancing;
|
||||
};
|
||||
|
||||
|
@ -1,2 +0,0 @@
|
||||
add_executable(test-connect test_connect.cpp)
|
||||
target_link_libraries (test-connect PRIVATE dbms)
|
@ -1,59 +0,0 @@
|
||||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include <iostream>
|
||||
#include <Poco/Net/StreamSocket.h>
|
||||
#include <Common/Exception.h>
|
||||
#include <IO/ReadHelpers.h>
|
||||
|
||||
|
||||
/** In a loop it connects to the server and immediately breaks the connection.
|
||||
* Using the SO_LINGER option, we ensure that the connection is terminated by sending a RST packet (not FIN).
|
||||
* This behavior causes a bug in the TCPServer implementation in the Poco library.
|
||||
*/
|
||||
int main(int argc, char ** argv)
|
||||
try
|
||||
{
|
||||
for (size_t i = 0, num_iters = argc >= 2 ? DB::parse<size_t>(argv[1]) : 1; i < num_iters; ++i)
|
||||
{
|
||||
std::cerr << ".";
|
||||
|
||||
Poco::Net::SocketAddress address("localhost", 9000);
|
||||
|
||||
int fd = socket(PF_INET, SOCK_STREAM, IPPROTO_IP);
|
||||
|
||||
if (fd < 0)
|
||||
DB::throwFromErrno("Cannot create socket", 0);
|
||||
|
||||
linger linger_value;
|
||||
linger_value.l_onoff = 1;
|
||||
linger_value.l_linger = 0;
|
||||
|
||||
if (0 != setsockopt(fd, SOL_SOCKET, SO_LINGER, &linger_value, sizeof(linger_value)))
|
||||
DB::throwFromErrno("Cannot set linger", 0);
|
||||
|
||||
try
|
||||
{
|
||||
int res = connect(fd, address.addr(), address.length());
|
||||
|
||||
if (res != 0 && errno != EINPROGRESS && errno != EWOULDBLOCK)
|
||||
{
|
||||
close(fd);
|
||||
DB::throwFromErrno("Cannot connect", 0);
|
||||
}
|
||||
|
||||
close(fd);
|
||||
}
|
||||
catch (const Poco::Exception & e)
|
||||
{
|
||||
std::cerr << e.displayText() << "\n";
|
||||
}
|
||||
}
|
||||
|
||||
std::cerr << "\n";
|
||||
}
|
||||
catch (const Poco::Exception & e)
|
||||
{
|
||||
std::cerr << e.displayText() << "\n";
|
||||
}
|
@ -210,4 +210,88 @@ public:
    }
};

/** Implements `Max` operation.
 *  Returns true if changed
 */
class FieldVisitorMax : public StaticVisitor<bool>
{
private:
    const Field & rhs;
public:
    explicit FieldVisitorMax(const Field & rhs_) : rhs(rhs_) {}

    bool operator() (Null &) const { throw Exception("Cannot compare Nulls", ErrorCodes::LOGICAL_ERROR); }
    bool operator() (Array &) const { throw Exception("Cannot compare Arrays", ErrorCodes::LOGICAL_ERROR); }
    bool operator() (Tuple &) const { throw Exception("Cannot compare Tuples", ErrorCodes::LOGICAL_ERROR); }
    bool operator() (AggregateFunctionStateData &) const { throw Exception("Cannot compare AggregateFunctionStates", ErrorCodes::LOGICAL_ERROR); }

    template <typename T>
    bool operator() (DecimalField<T> & x) const
    {
        auto val = get<DecimalField<T>>(rhs);
        if (val > x)
        {
            x = val;
            return true;
        }

        return false;
    }

    template <typename T>
    bool operator() (T & x) const
    {
        auto val = get<T>(rhs);
        if (val > x)
        {
            x = val;
            return true;
        }

        return false;
    }
};

/** Implements `Min` operation.
 *  Returns true if changed
 */
class FieldVisitorMin : public StaticVisitor<bool>
{
private:
    const Field & rhs;
public:
    explicit FieldVisitorMin(const Field & rhs_) : rhs(rhs_) {}

    bool operator() (Null &) const { throw Exception("Cannot compare Nulls", ErrorCodes::LOGICAL_ERROR); }
    bool operator() (Array &) const { throw Exception("Cannot sum Arrays", ErrorCodes::LOGICAL_ERROR); }
    bool operator() (Tuple &) const { throw Exception("Cannot sum Tuples", ErrorCodes::LOGICAL_ERROR); }
    bool operator() (AggregateFunctionStateData &) const { throw Exception("Cannot sum AggregateFunctionStates", ErrorCodes::LOGICAL_ERROR); }

    template <typename T>
    bool operator() (DecimalField<T> & x) const
    {
        auto val = get<DecimalField<T>>(rhs);
        if (val < x)
        {
            x = val;
            return true;
        }

        return false;
    }

    template <typename T>
    bool operator() (T & x) const
    {
        auto val = get<T>(rhs);
        if (val < x)
        {
            x = val;
            return true;
        }

        return false;
    }
};

}
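Editor's note — a short, hedged usage sketch, not part of the commit: like FieldVisitorSum, the new visitors are meant to be driven through applyVisitor, which is how AggregateFunctionMapBase uses them earlier in this diff. The numeric values below are illustrative only.

    DB::Field acc = DB::UInt64(3);

    // Keep the larger of the two values; returns true because acc changed.
    bool changed = DB::applyVisitor(DB::FieldVisitorMax(DB::Field(DB::UInt64(7))), acc);  // acc == 7

    // Keep the smaller value; returns true again.
    changed = DB::applyVisitor(DB::FieldVisitorMin(DB::Field(DB::UInt64(5))), acc);       // acc == 5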
|
@ -16,14 +16,14 @@ namespace ErrorCodes
|
||||
}
|
||||
|
||||
/** If stored objects may have several names (aliases)
|
||||
* this interface may be helpful
|
||||
* template parameter is available as Creator
|
||||
*/
|
||||
template <typename CreatorFunc>
|
||||
class IFactoryWithAliases : public IHints<2, IFactoryWithAliases<CreatorFunc>>
|
||||
* this interface may be helpful
|
||||
* template parameter is available as Value
|
||||
*/
|
||||
template <typename ValueType>
|
||||
class IFactoryWithAliases : public IHints<2, IFactoryWithAliases<ValueType>>
|
||||
{
|
||||
protected:
|
||||
using Creator = CreatorFunc;
|
||||
using Value = ValueType;
|
||||
|
||||
String getAliasToOrName(const String & name) const
|
||||
{
|
||||
@ -43,13 +43,13 @@ public:
|
||||
CaseInsensitive
|
||||
};
|
||||
|
||||
/** Register additional name for creator
|
||||
* real_name have to be already registered.
|
||||
*/
|
||||
/** Register additional name for value
|
||||
* real_name have to be already registered.
|
||||
*/
|
||||
void registerAlias(const String & alias_name, const String & real_name, CaseSensitiveness case_sensitiveness = CaseSensitive)
|
||||
{
|
||||
const auto & creator_map = getCreatorMap();
|
||||
const auto & case_insensitive_creator_map = getCaseInsensitiveCreatorMap();
|
||||
const auto & creator_map = getMap();
|
||||
const auto & case_insensitive_creator_map = getCaseInsensitiveMap();
|
||||
const String factory_name = getFactoryName();
|
||||
|
||||
String real_dict_name;
|
||||
@ -80,7 +80,7 @@ public:
|
||||
{
|
||||
std::vector<String> result;
|
||||
auto getter = [](const auto & pair) { return pair.first; };
|
||||
std::transform(getCreatorMap().begin(), getCreatorMap().end(), std::back_inserter(result), getter);
|
||||
std::transform(getMap().begin(), getMap().end(), std::back_inserter(result), getter);
|
||||
std::transform(aliases.begin(), aliases.end(), std::back_inserter(result), getter);
|
||||
return result;
|
||||
}
|
||||
@ -88,7 +88,7 @@ public:
|
||||
bool isCaseInsensitive(const String & name) const
|
||||
{
|
||||
String name_lowercase = Poco::toLower(name);
|
||||
return getCaseInsensitiveCreatorMap().count(name_lowercase) || case_insensitive_aliases.count(name_lowercase);
|
||||
return getCaseInsensitiveMap().count(name_lowercase) || case_insensitive_aliases.count(name_lowercase);
|
||||
}
|
||||
|
||||
const String & aliasTo(const String & name) const
|
||||
@ -109,11 +109,11 @@ public:
|
||||
virtual ~IFactoryWithAliases() override {}
|
||||
|
||||
private:
|
||||
using InnerMap = std::unordered_map<String, Creator>; // name -> creator
|
||||
using InnerMap = std::unordered_map<String, Value>; // name -> creator
|
||||
using AliasMap = std::unordered_map<String, String>; // alias -> original type
|
||||
|
||||
virtual const InnerMap & getCreatorMap() const = 0;
|
||||
virtual const InnerMap & getCaseInsensitiveCreatorMap() const = 0;
|
||||
virtual const InnerMap & getMap() const = 0;
|
||||
virtual const InnerMap & getCaseInsensitiveMap() const = 0;
|
||||
virtual String getFactoryName() const = 0;
|
||||
|
||||
/// Alias map to data_types from previous two maps
|
||||
|