Merge branch 'master' into revert-48277-revert-48196-jit-randomization

Commit 867c4ece64 (mirror of https://github.com/ClickHouse/ClickHouse.git)
.github/workflows/nightly.yml (vendored, 7 changes)
@@ -121,8 +121,6 @@ jobs:
          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
  SonarCloud:
-    # TODO: Remove if: whenever SonarCloud supports c++23
-    if: ${{ false }}
    runs-on: [self-hosted, builder]
    env:
      SONAR_SCANNER_VERSION: 4.8.0.2856
@@ -159,7 +157,7 @@ jobs:
      - name: Set Up Build Tools
        run: |
          sudo apt-get update
-          sudo apt-get install -yq git cmake ccache ninja-build python3 yasm
+          sudo apt-get install -yq git cmake ccache ninja-build python3 yasm nasm
          sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
      - name: Run build-wrapper
        run: |
@@ -178,4 +176,5 @@ jobs:
            --define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" \
            --define sonar.projectKey="ClickHouse_ClickHouse" \
            --define sonar.organization="clickhouse-java" \
-            --define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql" \
+            --define sonar.cfamily.cpp23.enabled=true \
+            --define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql"
CHANGELOG.md (101 changes)
@@ -1,4 +1,5 @@
 ### Table of Contents
+**[ClickHouse release v23.6, 2023-06-30](#236)**<br/>
 **[ClickHouse release v23.5, 2023-06-08](#235)**<br/>
 **[ClickHouse release v23.4, 2023-04-26](#234)**<br/>
 **[ClickHouse release v23.3 LTS, 2023-03-30](#233)**<br/>
@@ -8,6 +9,106 @@

 # 2023 Changelog

+### <a id="236"></a> ClickHouse release 23.6, 2023-06-29
+
+#### Backward Incompatible Change
+* Delete feature `do_not_evict_index_and_mark_files` in the fs cache. This feature was only making things worse. [#51253](https://github.com/ClickHouse/ClickHouse/pull/51253) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Remove ALTER support for experimental LIVE VIEW. [#51287](https://github.com/ClickHouse/ClickHouse/pull/51287) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Decrease the default values for `http_max_field_value_size` and `http_max_field_name_size` to 128 KiB. [#51163](https://github.com/ClickHouse/ClickHouse/pull/51163) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+* CGroups metrics related to CPU are replaced with one metric, `CGroupMaxCPU` for better usability. The `Normalized` CPU usage metrics will be normalized to CGroups limits instead of the total number of CPUs when they are set. This closes [#50836](https://github.com/ClickHouse/ClickHouse/issues/50836). [#50835](https://github.com/ClickHouse/ClickHouse/pull/50835) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+#### New Feature
+* The function `transform` as well as `CASE` with value matching started to support all data types. This closes [#29730](https://github.com/ClickHouse/ClickHouse/issues/29730). This closes [#32387](https://github.com/ClickHouse/ClickHouse/issues/32387). This closes [#50827](https://github.com/ClickHouse/ClickHouse/issues/50827). This closes [#31336](https://github.com/ClickHouse/ClickHouse/issues/31336). This closes [#40493](https://github.com/ClickHouse/ClickHouse/issues/40493). [#51351](https://github.com/ClickHouse/ClickHouse/pull/51351) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Added option `--rename_files_after_processing <pattern>`. This closes [#34207](https://github.com/ClickHouse/ClickHouse/issues/34207). [#49626](https://github.com/ClickHouse/ClickHouse/pull/49626) ([alekseygolub](https://github.com/alekseygolub)).
+* Add support for `TRUNCATE` modifier in `INTO OUTFILE` clause. Suggest using `APPEND` or `TRUNCATE` for `INTO OUTFILE` when file exists. [#50950](https://github.com/ClickHouse/ClickHouse/pull/50950) ([alekar](https://github.com/alekar)).
+* Add table engine `Redis` and table function `redis`. It allows querying external Redis servers. [#50150](https://github.com/ClickHouse/ClickHouse/pull/50150) ([JackyWoo](https://github.com/JackyWoo)).
+* Allow to skip empty files in file/s3/url/hdfs table functions using settings `s3_skip_empty_files`, `hdfs_skip_empty_files`, `engine_file_skip_empty_files`, `engine_url_skip_empty_files`. [#50364](https://github.com/ClickHouse/ClickHouse/pull/50364) ([Kruglov Pavel](https://github.com/Avogar)).
+* Add a new setting named `use_mysql_types_in_show_columns` to alter the `SHOW COLUMNS` SQL statement to display MySQL equivalent types when a client is connected via the MySQL compatibility port. [#49577](https://github.com/ClickHouse/ClickHouse/pull/49577) ([Thomas Panetti](https://github.com/tpanetti)).
+* Clickhouse-client can now be called with a connection string instead of "--host", "--port", "--user" etc. [#50689](https://github.com/ClickHouse/ClickHouse/pull/50689) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
+* Add setting `session_timezone`; it is used as the default timezone for a session when not explicitly specified. [#44149](https://github.com/ClickHouse/ClickHouse/pull/44149) ([Andrey Zvonov](https://github.com/zvonand)).
+* Codec DEFLATE_QPL is now controlled via server setting "enable_deflate_qpl_codec" (default: false) instead of setting "allow_experimental_codecs". This marks DEFLATE_QPL non-experimental. [#50775](https://github.com/ClickHouse/ClickHouse/pull/50775) ([Robert Schulze](https://github.com/rschu1ze)).
+
+#### Performance Improvement
+* Improved scheduling of merge selecting and cleanup tasks in `ReplicatedMergeTree`. The tasks will not be executed too frequently when there's nothing to merge or cleanup. Added settings `max_merge_selecting_sleep_ms`, `merge_selecting_sleep_slowdown_factor`, `max_cleanup_delay_period` and `cleanup_thread_preferred_points_per_iteration`. It should close [#31919](https://github.com/ClickHouse/ClickHouse/issues/31919). [#50107](https://github.com/ClickHouse/ClickHouse/pull/50107) ([Alexander Tokmakov](https://github.com/tavplubix)).
+* Make filter push down through cross join. [#50605](https://github.com/ClickHouse/ClickHouse/pull/50605) ([Han Fei](https://github.com/hanfei1991)).
+* Improve performance with enabled QueryProfiler using thread-local timer_id instead of global object. [#48778](https://github.com/ClickHouse/ClickHouse/pull/48778) ([Jiebin Sun](https://github.com/jiebinn)).
+* Rewrite CapnProto input/output format to improve its performance. Map column names and CapnProto fields case insensitive, fix reading/writing of nested structure fields. [#49752](https://github.com/ClickHouse/ClickHouse/pull/49752) ([Kruglov Pavel](https://github.com/Avogar)).
+* Optimize parquet write performance for parallel threads. [#50102](https://github.com/ClickHouse/ClickHouse/pull/50102) ([Hongbin Ma](https://github.com/binmahone)).
+* Disable `parallelize_output_from_storages` for processing MATERIALIZED VIEWs and storages with one block only. [#50214](https://github.com/ClickHouse/ClickHouse/pull/50214) ([Azat Khuzhin](https://github.com/azat)).
+* Merge PR [#46558](https://github.com/ClickHouse/ClickHouse/pull/46558). Avoid block permutation during sort if the block is already sorted. [#50697](https://github.com/ClickHouse/ClickHouse/pull/50697) ([Alexey Milovidov](https://github.com/alexey-milovidov), [Maksim Kita](https://github.com/kitaisreal)).
+* Make multiple list requests to ZooKeeper in parallel to speed up reading from system.zookeeper table. [#51042](https://github.com/ClickHouse/ClickHouse/pull/51042) ([Alexander Gololobov](https://github.com/davenger)).
+* Speedup initialization of DateTime lookup tables for time zones. This should reduce startup/connect time of clickhouse-client especially in debug build as it is rather heavy. [#51347](https://github.com/ClickHouse/ClickHouse/pull/51347) ([Alexander Gololobov](https://github.com/davenger)).
+* Fix data lakes slowness because of synchronous head requests. (Related to Iceberg/Deltalake/Hudi being slow with a lot of files). [#50976](https://github.com/ClickHouse/ClickHouse/pull/50976) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Do not read all the columns from right GLOBAL JOIN table. [#50721](https://github.com/ClickHouse/ClickHouse/pull/50721) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+
+#### Experimental Feature
+* Support parallel replicas with the analyzer. [#50441](https://github.com/ClickHouse/ClickHouse/pull/50441) ([Raúl Marín](https://github.com/Algunenano)).
+* Add random sleep before large merges/mutations execution to split load more evenly between replicas in case of zero-copy replication. [#51282](https://github.com/ClickHouse/ClickHouse/pull/51282) ([alesapin](https://github.com/alesapin)).
+* Do not replicate `ALTER PARTITION` queries and mutations through `Replicated` database if it has only one shard and the underlying table is `ReplicatedMergeTree`. [#51049](https://github.com/ClickHouse/ClickHouse/pull/51049) ([Alexander Tokmakov](https://github.com/tavplubix)).
+
+#### Improvement
+* Relax the thresholds for "too many parts" to be more modern. Return the backpressure during long-running insert queries. [#50856](https://github.com/ClickHouse/ClickHouse/pull/50856) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Allow to cast IPv6 to IPv4 address for CIDR ::ffff:0:0/96 (IPv4-mapped addresses). [#49759](https://github.com/ClickHouse/ClickHouse/pull/49759) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
+* Update MongoDB protocol to support MongoDB 5.1 version and newer. Support for the versions with the old protocol (<3.6) is preserved. Closes [#45621](https://github.com/ClickHouse/ClickHouse/issues/45621), [#49879](https://github.com/ClickHouse/ClickHouse/issues/49879). [#50061](https://github.com/ClickHouse/ClickHouse/pull/50061) ([Nikolay Degterinsky](https://github.com/evillique)).
+* Add setting `input_format_max_bytes_to_read_for_schema_inference` to limit the number of bytes to read in schema inference. Closes [#50577](https://github.com/ClickHouse/ClickHouse/issues/50577). [#50592](https://github.com/ClickHouse/ClickHouse/pull/50592) ([Kruglov Pavel](https://github.com/Avogar)).
+* Respect setting `input_format_null_as_default` in schema inference. [#50602](https://github.com/ClickHouse/ClickHouse/pull/50602) ([Kruglov Pavel](https://github.com/Avogar)).
+* Allow to skip trailing empty lines in CSV/TSV/CustomSeparated formats via settings `input_format_csv_skip_trailing_empty_lines`, `input_format_tsv_skip_trailing_empty_lines` and `input_format_custom_skip_trailing_empty_lines` (disabled by default). Closes [#49315](https://github.com/ClickHouse/ClickHouse/issues/49315). [#50635](https://github.com/ClickHouse/ClickHouse/pull/50635) ([Kruglov Pavel](https://github.com/Avogar)).
+* Functions "toDateOrDefault|OrNull" and "accuateCast[OrDefault|OrNull]" now correctly parse numeric arguments. [#50709](https://github.com/ClickHouse/ClickHouse/pull/50709) ([Dmitry Kardymon](https://github.com/kardymonds)).
+* Support CSV with whitespace or `\t` field delimiters, and these delimiters are supported in Spark. [#50712](https://github.com/ClickHouse/ClickHouse/pull/50712) ([KevinyhZou](https://github.com/KevinyhZou)).
+* Settings `number_of_mutations_to_delay` and `number_of_mutations_to_throw` are enabled by default now with values 500 and 1000 respectively. [#50726](https://github.com/ClickHouse/ClickHouse/pull/50726) ([Anton Popov](https://github.com/CurtizJ)).
+* The dashboard correctly shows missing values. This closes [#50831](https://github.com/ClickHouse/ClickHouse/issues/50831). [#50832](https://github.com/ClickHouse/ClickHouse/pull/50832) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Added the possibility to use date and time arguments in the syslog timestamp format in functions `parseDateTimeBestEffort*` and `parseDateTime64BestEffort*`. [#50925](https://github.com/ClickHouse/ClickHouse/pull/50925) ([Victor Krasnov](https://github.com/sirvickr)).
+* Command line parameter "--password" in clickhouse-client can now be specified only once. [#50966](https://github.com/ClickHouse/ClickHouse/pull/50966) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
+* Use `hash_of_all_files` from `system.parts` to check identity of parts during on-cluster backups. [#50997](https://github.com/ClickHouse/ClickHouse/pull/50997) ([Vitaly Baranov](https://github.com/vitlibar)).
+* The system table zookeeper_connection connected_time identifies the time when the connection is established (standard format), and session_uptime_elapsed_seconds is added, which labels the duration of the established connection session (in seconds). [#51026](https://github.com/ClickHouse/ClickHouse/pull/51026) ([郭小龙](https://github.com/guoxiaolongzte)).
+* Improve the progress bar for file/s3/hdfs/url table functions by using chunk size from source data and using incremental total size counting in each thread. Fix the progress bar for *Cluster functions. This closes [#47250](https://github.com/ClickHouse/ClickHouse/issues/47250). [#51088](https://github.com/ClickHouse/ClickHouse/pull/51088) ([Kruglov Pavel](https://github.com/Avogar)).
+* Add total_bytes_to_read to the Progress packet in TCP protocol for better Progress bar. [#51158](https://github.com/ClickHouse/ClickHouse/pull/51158) ([Kruglov Pavel](https://github.com/Avogar)).
+* Better checking of data parts on disks with filesystem cache. [#51164](https://github.com/ClickHouse/ClickHouse/pull/51164) ([Anton Popov](https://github.com/CurtizJ)).
+* Fix sometimes not correct current_elements_num in fs cache. [#51242](https://github.com/ClickHouse/ClickHouse/pull/51242) ([Kseniia Sumarokova](https://github.com/kssenii)).
+
+#### Build/Testing/Packaging Improvement
+* Add embedded keeper-client to standalone keeper binary. [#50964](https://github.com/ClickHouse/ClickHouse/pull/50964) ([pufit](https://github.com/pufit)).
+* Actual LZ4 version is used now. [#50621](https://github.com/ClickHouse/ClickHouse/pull/50621) ([Nikita Taranov](https://github.com/nickitat)).
+* ClickHouse server will print the list of changed settings on fatal errors. This closes [#51137](https://github.com/ClickHouse/ClickHouse/issues/51137). [#51138](https://github.com/ClickHouse/ClickHouse/pull/51138) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Allow building ClickHouse with clang-17. [#51300](https://github.com/ClickHouse/ClickHouse/pull/51300) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* [SQLancer](https://github.com/sqlancer/sqlancer) check is considered stable as bugs that were triggered by it are fixed. Now failures of SQLancer check will be reported as failed check status. [#51340](https://github.com/ClickHouse/ClickHouse/pull/51340) ([Ilya Yatsishin](https://github.com/qoega)).
+* Split huge `RUN` in Dockerfile into smaller conditional. Install the necessary tools on demand in the same `RUN` layer, and remove them after that. Upgrade the OS only once at the beginning. Use a modern way to check the signed repository. Downgrade the base repo to ubuntu:20.04 to address the issues on older docker versions. Upgrade golang version to address golang vulnerabilities. [#51504](https://github.com/ClickHouse/ClickHouse/pull/51504) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+
+#### Bug Fix (user-visible misbehavior in an official stable release)
+
+* Report loading status for executable dictionaries correctly [#48775](https://github.com/ClickHouse/ClickHouse/pull/48775) ([Anton Kozlov](https://github.com/tonickkozlov)).
+* Proper mutation of skip indices and projections [#50104](https://github.com/ClickHouse/ClickHouse/pull/50104) ([Amos Bird](https://github.com/amosbird)).
+* Cleanup moving parts [#50489](https://github.com/ClickHouse/ClickHouse/pull/50489) ([vdimir](https://github.com/vdimir)).
+* Fix backward compatibility for IP types hashing in aggregate functions [#50551](https://github.com/ClickHouse/ClickHouse/pull/50551) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
+* Fix Log family table return wrong rows count after truncate [#50585](https://github.com/ClickHouse/ClickHouse/pull/50585) ([flynn](https://github.com/ucasfl)).
+* Fix bug in `uniqExact` parallel merging [#50590](https://github.com/ClickHouse/ClickHouse/pull/50590) ([Nikita Taranov](https://github.com/nickitat)).
+* Revert recent grace hash join changes [#50699](https://github.com/ClickHouse/ClickHouse/pull/50699) ([vdimir](https://github.com/vdimir)).
+* Query Cache: Try to fix bad cast from `ColumnConst` to `ColumnVector<char8_t>` [#50704](https://github.com/ClickHouse/ClickHouse/pull/50704) ([Robert Schulze](https://github.com/rschu1ze)).
+* Avoid storing logs in Keeper containing unknown operation [#50751](https://github.com/ClickHouse/ClickHouse/pull/50751) ([Antonio Andelic](https://github.com/antonio2368)).
+* SummingMergeTree support for DateTime64 [#50797](https://github.com/ClickHouse/ClickHouse/pull/50797) ([Jordi Villar](https://github.com/jrdi)).
+* Add compatibility setting for non-const timezones [#50834](https://github.com/ClickHouse/ClickHouse/pull/50834) ([Robert Schulze](https://github.com/rschu1ze)).
+* Fix hashing of LDAP params in the cache entries [#50865](https://github.com/ClickHouse/ClickHouse/pull/50865) ([Julian Maicher](https://github.com/jmaicher)).
+* Fallback to parsing big integer from String instead of exception in Parquet format [#50873](https://github.com/ClickHouse/ClickHouse/pull/50873) ([Kruglov Pavel](https://github.com/Avogar)).
+* Fix checking the lock file too often while writing a backup [#50889](https://github.com/ClickHouse/ClickHouse/pull/50889) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Do not apply projection if read-in-order was enabled. [#50923](https://github.com/ClickHouse/ClickHouse/pull/50923) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix race in the Azure blob storage iterator [#50936](https://github.com/ClickHouse/ClickHouse/pull/50936) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
+* Fix erroneous `sort_description` propagation in `CreatingSets` [#50955](https://github.com/ClickHouse/ClickHouse/pull/50955) ([Nikita Taranov](https://github.com/nickitat)).
+* Fix Iceberg v2 optional metadata parsing [#50974](https://github.com/ClickHouse/ClickHouse/pull/50974) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* MaterializedMySQL: Keep parentheses for empty table overrides [#50977](https://github.com/ClickHouse/ClickHouse/pull/50977) ([Val Doroshchuk](https://github.com/valbok)).
+* Fix crash in BackupCoordinationStageSync::setError() [#51012](https://github.com/ClickHouse/ClickHouse/pull/51012) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix subtly broken copy-on-write of ColumnLowCardinality dictionary [#51064](https://github.com/ClickHouse/ClickHouse/pull/51064) ([Michael Kolupaev](https://github.com/al13n321)).
+* Generate safe IVs [#51086](https://github.com/ClickHouse/ClickHouse/pull/51086) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
+* Fix ineffective query cache for SELECTs with subqueries [#51132](https://github.com/ClickHouse/ClickHouse/pull/51132) ([Robert Schulze](https://github.com/rschu1ze)).
+* Fix Set index with constant nullable comparison. [#51205](https://github.com/ClickHouse/ClickHouse/pull/51205) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix a crash in s3 and s3Cluster functions [#51209](https://github.com/ClickHouse/ClickHouse/pull/51209) ([Nikolay Degterinsky](https://github.com/evillique)).
+* Fix a crash with compiled expressions [#51231](https://github.com/ClickHouse/ClickHouse/pull/51231) ([LiuNeng](https://github.com/liuneng1994)).
+* Fix use-after-free in StorageURL when switching URLs [#51260](https://github.com/ClickHouse/ClickHouse/pull/51260) ([Michael Kolupaev](https://github.com/al13n321)).
+* Updated check for parameterized view [#51272](https://github.com/ClickHouse/ClickHouse/pull/51272) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
+* Fix multiple writing of same file to backup [#51299](https://github.com/ClickHouse/ClickHouse/pull/51299) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix fuzzer failure in ActionsDAG [#51301](https://github.com/ClickHouse/ClickHouse/pull/51301) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Remove garbage from function `transform` [#51350](https://github.com/ClickHouse/ClickHouse/pull/51350) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+
 ### <a id="235"></a> ClickHouse release 23.5, 2023-06-08

 #### Upgrade Notes
@@ -16,17 +16,19 @@ curl https://clickhouse.com/ | sh
 * [YouTube channel](https://www.youtube.com/c/ClickHouseDB) has a lot of content about ClickHouse in video format.
 * [Slack](https://clickhouse.com/slack) and [Telegram](https://telegram.me/clickhouse_en) allow chatting with ClickHouse users in real-time.
 * [Blog](https://clickhouse.com/blog/) contains various ClickHouse-related articles, as well as announcements and reports about events.
-* [Code Browser (Woboq)](https://clickhouse.com/codebrowser/ClickHouse/index.html) with syntax highlight and navigation.
-* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlight, powered by github.dev.
+* [Code Browser (Woboq)](https://clickhouse.com/codebrowser/ClickHouse/index.html) with syntax highlighting and navigation.
+* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlighting, powered by github.dev.
+* [Static Analysis (SonarCloud)](https://sonarcloud.io/project/issues?resolved=false&id=ClickHouse_ClickHouse) proposes C++ quality improvements.
 * [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.

 ## Upcoming Events

 * [**v23.6 Release Webinar**](https://clickhouse.com/company/events/v23-6-release-call?utm_source=github&utm_medium=social&utm_campaign=release-webinar-2023-06) - Jun 29 - 23.6 is rapidly approaching. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
-* [**ClickHouse Meetup in Paris**](https://www.meetup.com/clickhouse-france-user-group/events/294283460) - Jul 4
 * [**ClickHouse Meetup in Boston**](https://www.meetup.com/clickhouse-boston-user-group/events/293913596) - Jul 18
 * [**ClickHouse Meetup in NYC**](https://www.meetup.com/clickhouse-new-york-user-group/events/293913441) - Jul 19
 * [**ClickHouse Meetup in Toronto**](https://www.meetup.com/clickhouse-toronto-user-group/events/294183127) - Jul 20
+* [**ClickHouse Meetup in Singapore**](https://www.meetup.com/clickhouse-singapore-meetup-group/events/294428050/) - Jul 27
+* [**ClickHouse Meetup in Paris**](https://www.meetup.com/clickhouse-france-user-group/events/294283460) - Sep 12

 Also, keep an eye out for upcoming meetups around the world. Somewhere else you want us to be? Please feel free to reach out to tyler <at> clickhouse <dot> com.

@@ -13,6 +13,7 @@ The following versions of ClickHouse server are currently being supported with s

 | Version | Supported |
 |:-|:-|
+| 23.6 | ✔️ |
 | 23.5 | ✔️ |
 | 23.4 | ✔️ |
 | 23.3 | ✔️ |
@@ -2,21 +2,23 @@

 #include <base/strong_typedef.h>
 #include <base/extended_types.h>
+#include <Common/formatIPv6.h>
 #include <Common/memcmpSmall.h>

 namespace DB
 {

-using IPv4 = StrongTypedef<UInt32, struct IPv4Tag>;
+struct IPv4 : StrongTypedef<UInt32, struct IPv4Tag>
+{
+    using StrongTypedef::StrongTypedef;
+    using StrongTypedef::operator=;
+    constexpr explicit IPv4(UInt64 value): StrongTypedef(static_cast<UnderlyingType>(value)) {}
+};

 struct IPv6 : StrongTypedef<UInt128, struct IPv6Tag>
 {
-    constexpr IPv6() = default;
-    constexpr explicit IPv6(const UInt128 & x) : StrongTypedef(x) {}
-    constexpr explicit IPv6(UInt128 && x) : StrongTypedef(std::move(x)) {}
-
-    IPv6 & operator=(const UInt128 & rhs) { StrongTypedef::operator=(rhs); return *this; }
-    IPv6 & operator=(UInt128 && rhs) { StrongTypedef::operator=(std::move(rhs)); return *this; }
+    using StrongTypedef::StrongTypedef;
+    using StrongTypedef::operator=;

     bool operator<(const IPv6 & rhs) const
     {
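For illustration, a minimal sketch of what the `IPv4` change above does: the bare `using` alias becomes a distinct struct inheriting the strong typedef, plus an explicit narrowing constructor from a 64-bit value. The `StrongUInt32`/`IPv4Sketch` names below are stand-ins for this example, not project code.

```cpp
#include <cstdint>

// Simplified stand-in for StrongTypedef<UInt32, Tag>: a distinct type wrapping a UInt32.
struct StrongUInt32
{
    using UnderlyingType = uint32_t;
    uint32_t value = 0;
    constexpr StrongUInt32() = default;
    constexpr explicit StrongUInt32(uint32_t v) : value(v) {}
};

// Mirrors the new IPv4: inherit the wrapper's constructors and add an explicit
// constructor from UInt64, so a lossy 64-to-32-bit conversion must be spelled out.
struct IPv4Sketch : StrongUInt32
{
    using StrongUInt32::StrongUInt32;
    constexpr explicit IPv4Sketch(uint64_t v) : StrongUInt32(static_cast<uint32_t>(v)) {}
};

int main()
{
    uint64_t raw = 0x7F000001;          // 127.0.0.1 held in a 64-bit integer
    IPv4Sketch ip(raw);                  // explicit narrowing, no silent implicit conversion
    return ip.value == 0x7F000001 ? 0 : 1;
}
```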
@@ -54,12 +56,22 @@ namespace DB

 namespace std
 {
+    /// For historical reasons we hash IPv6 as a FixedString(16)
     template <>
     struct hash<DB::IPv6>
     {
         size_t operator()(const DB::IPv6 & x) const
         {
-            return std::hash<DB::IPv6::UnderlyingType>()(x.toUnderType());
+            return std::hash<std::string_view>{}(std::string_view(reinterpret_cast<const char*>(&x.toUnderType()), IPV6_BINARY_LENGTH));
+        }
+    };
+
+    template <>
+    struct hash<DB::IPv4>
+    {
+        size_t operator()(const DB::IPv4 & x) const
+        {
+            return std::hash<DB::IPv4::UnderlyingType>()(x.toUnderType());
         }
     };
 }
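The new `std::hash<DB::IPv6>` hashes the 16 raw bytes of the address the way a `FixedString(16)` would be hashed, rather than hashing the `UInt128` value. A self-contained sketch of that byte-wise hashing, using a plain 16-byte array in place of `DB::IPv6`:

```cpp
#include <array>
#include <cstddef>
#include <functional>
#include <string_view>

// Hash 16 raw bytes via std::hash<std::string_view>, mirroring the
// "hash IPv6 as a FixedString(16)" approach of the specialization above.
inline size_t hashBytes16(const std::array<unsigned char, 16> & bytes)
{
    return std::hash<std::string_view>{}(
        std::string_view(reinterpret_cast<const char *>(bytes.data()), bytes.size()));
}

int main()
{
    std::array<unsigned char, 16> loopback{};  // the IPv6 loopback ::1
    loopback[15] = 1;
    size_t h = hashBytes16(loopback);          // concrete value is implementation-defined
    return h == h ? 0 : 1;
}
```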
@@ -7,7 +7,13 @@

 /** Returns value `from` converted to type `To` while retaining bit representation.
   * `To` and `From` must satisfy `CopyConstructible`.
+  *
   * In contrast to std::bit_cast can cast types of different width.
+  *
+  * Note: for signed types of narrower size, the casted result is zero-extended
+  * instead of sign-extended as with regular static_cast.
+  * For example, -1 Int8 (represented as 0xFF) bit_casted to UInt64
+  * gives 255 (represented as 0x00000000000000FF) instead of 0xFFFFFFFFFFFFFFFF
   */
 template <typename To, typename From>
 std::decay_t<To> bit_cast(const From & from)
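The added comment documents that this widening bit_cast zero-extends narrow signed values instead of sign-extending them. A small sketch of the difference, using a local memcpy-based helper (not the project's header) and assuming a little-endian target:

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

// Minimal stand-in for the widening bit_cast described above:
// copy the source bytes into a zero-initialized destination.
template <typename To, typename From>
To widening_bit_cast(const From & from)
{
    static_assert(sizeof(To) >= sizeof(From));
    To to{};                                   // unused high bytes stay zero (zero-extension)
    std::memcpy(&to, &from, sizeof(From));     // little-endian layout assumed for this sketch
    return to;
}

int main()
{
    int8_t x = -1;                                                    // bit pattern 0xFF
    assert(widening_bit_cast<uint64_t>(x) == 0xFF);                   // zero-extended: 255
    assert(static_cast<uint64_t>(x) == 0xFFFFFFFFFFFFFFFFULL);        // sign-extended by static_cast
    return 0;
}
```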
base/base/hex.h (284 changes)
@@ -4,23 +4,22 @@
 #include <cstring>
 #include "types.h"

-/// Maps 0..15 to 0..9A..F or 0..9a..f correspondingly.
-constexpr inline std::string_view hex_digit_to_char_uppercase_table = "0123456789ABCDEF";
-constexpr inline std::string_view hex_digit_to_char_lowercase_table = "0123456789abcdef";
-
-constexpr char hexDigitUppercase(unsigned char c)
-{
-    return hex_digit_to_char_uppercase_table[c];
-}
-constexpr char hexDigitLowercase(unsigned char c)
-{
-    return hex_digit_to_char_lowercase_table[c];
-}
-
-/// Maps 0..255 to 00..FF or 00..ff correspondingly
-constexpr inline std::string_view hex_byte_to_char_uppercase_table = //
+namespace CityHash_v1_0_2 { struct uint128; }
+
+namespace wide
+{
+template <size_t Bits, typename Signed>
+class integer;
+}
+
+namespace impl
+{
+/// Maps 0..15 to 0..9A..F or 0..9a..f correspondingly.
+constexpr inline std::string_view hex_digit_to_char_uppercase_table = "0123456789ABCDEF";
+constexpr inline std::string_view hex_digit_to_char_lowercase_table = "0123456789abcdef";
+
+/// Maps 0..255 to 00..FF or 00..ff correspondingly.
+constexpr inline std::string_view hex_byte_to_char_uppercase_table = //
     "000102030405060708090A0B0C0D0E0F"
     "101112131415161718191A1B1C1D1E1F"
     "202122232425262728292A2B2C2D2E2F"
@@ -38,7 +37,7 @@ constexpr inline std::string_view hex_byte_to_char_uppercase_table = //
     "E0E1E2E3E4E5E6E7E8E9EAEBECEDEEEF"
     "F0F1F2F3F4F5F6F7F8F9FAFBFCFDFEFF";

 constexpr inline std::string_view hex_byte_to_char_lowercase_table = //
     "000102030405060708090a0b0c0d0e0f"
     "101112131415161718191a1b1c1d1e1f"
     "202122232425262728292a2b2c2d2e2f"
@@ -56,17 +55,8 @@ constexpr inline std::string_view hex_byte_to_char_lowercase_table = //
     "e0e1e2e3e4e5e6e7e8e9eaebecedeeef"
     "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff";

-inline void writeHexByteUppercase(UInt8 byte, void * out)
-{
-    memcpy(out, &hex_byte_to_char_uppercase_table[static_cast<size_t>(byte) * 2], 2);
-}
-
-inline void writeHexByteLowercase(UInt8 byte, void * out)
-{
-    memcpy(out, &hex_byte_to_char_lowercase_table[static_cast<size_t>(byte) * 2], 2);
-}
-
-constexpr inline std::string_view bin_byte_to_char_table = //
+/// Maps 0..255 to 00000000..11111111 correspondingly.
+constexpr inline std::string_view bin_byte_to_char_table = //
     "0000000000000001000000100000001100000100000001010000011000000111"
     "0000100000001001000010100000101100001100000011010000111000001111"
     "0001000000010001000100100001001100010100000101010001011000010111"
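`bin_byte_to_char_table` packs the 8-character binary spelling of every byte value into one 2048-character constant, so rendering a byte is just an offset plus an 8-byte copy. A sketch of the same lookup-table trick, built at runtime here for brevity (the real table above is a compile-time constant):

```cpp
#include <cstdio>
#include <cstring>
#include <string>

// Build "00000000" .. "11111111" for all 256 byte values, concatenated.
std::string makeBinByteTable()
{
    std::string table(256 * 8, '0');
    for (int byte = 0; byte < 256; ++byte)
        for (int bit = 0; bit < 8; ++bit)
            if (byte & (1 << (7 - bit)))
                table[byte * 8 + bit] = '1';
    return table;
}

int main()
{
    std::string table = makeBinByteTable();
    char out[9] = {};
    std::memcpy(out, &table[0xAB * 8], 8);   // same offset arithmetic as writeBinByte
    std::puts(out);                          // prints 10101011
    return 0;
}
```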
@@ -100,63 +90,8 @@ constexpr inline std::string_view bin_byte_to_char_table = //
     "1111000011110001111100101111001111110100111101011111011011110111"
     "1111100011111001111110101111101111111100111111011111111011111111";

-inline void writeBinByte(UInt8 byte, void * out)
-{
-    memcpy(out, &bin_byte_to_char_table[static_cast<size_t>(byte) * 8], 8);
-}
-
-/// Produces hex representation of an unsigned int with leading zeros (for checksums)
-template <typename TUInt>
-inline void writeHexUIntImpl(TUInt uint_, char * out, std::string_view table)
-{
-    union
-    {
-        TUInt value;
-        UInt8 uint8[sizeof(TUInt)];
-    };
-
-    value = uint_;
-
-    for (size_t i = 0; i < sizeof(TUInt); ++i)
-    {
-        if constexpr (std::endian::native == std::endian::little)
-            memcpy(out + i * 2, &table[static_cast<size_t>(uint8[sizeof(TUInt) - 1 - i]) * 2], 2);
-        else
-            memcpy(out + i * 2, &table[static_cast<size_t>(uint8[i]) * 2], 2);
-    }
-}
-
-template <typename TUInt>
-inline void writeHexUIntUppercase(TUInt uint_, char * out)
-{
-    writeHexUIntImpl(uint_, out, hex_byte_to_char_uppercase_table);
-}
-
-template <typename TUInt>
-inline void writeHexUIntLowercase(TUInt uint_, char * out)
-{
-    writeHexUIntImpl(uint_, out, hex_byte_to_char_lowercase_table);
-}
-
-template <typename TUInt>
-std::string getHexUIntUppercase(TUInt uint_)
-{
-    std::string res(sizeof(TUInt) * 2, '\0');
-    writeHexUIntUppercase(uint_, res.data());
-    return res;
-}
-
-template <typename TUInt>
-std::string getHexUIntLowercase(TUInt uint_)
-{
-    std::string res(sizeof(TUInt) * 2, '\0');
-    writeHexUIntLowercase(uint_, res.data());
-    return res;
-}
-
-/// Maps 0..9, A..F, a..f to 0..15. Other chars are mapped to implementation specific value.
-
-constexpr inline std::string_view hex_char_to_digit_table
+/// Maps 0..9, A..F, a..f to 0..15. Other chars are mapped to implementation specific value.
+constexpr inline std::string_view hex_char_to_digit_table
     = {"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
        "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
        "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
@@ -175,41 +110,182 @@ constexpr inline std::string_view hex_char_to_digit_table
        "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
        256};

-constexpr UInt8 unhex(char c)
-{
-    return hex_char_to_digit_table[static_cast<UInt8>(c)];
-}
-
-constexpr UInt8 unhex2(const char * data)
-{
-    return static_cast<UInt8>(unhex(data[0])) * 0x10 + static_cast<UInt8>(unhex(data[1]));
-}
-
-constexpr UInt16 unhex4(const char * data)
-{
-    return static_cast<UInt16>(unhex(data[0])) * 0x1000 + static_cast<UInt16>(unhex(data[1])) * 0x100
-        + static_cast<UInt16>(unhex(data[2])) * 0x10 + static_cast<UInt16>(unhex(data[3]));
-}
-
-template <typename TUInt>
-constexpr TUInt unhexUInt(const char * data)
-{
-    TUInt res = 0;
-    if constexpr ((sizeof(TUInt) <= 8) || ((sizeof(TUInt) % 8) != 0))
-    {
+/// Converts a hex digit '0'..'f' or '0'..'F' to its value 0..15.
+constexpr UInt8 unhexDigit(char c)
+{
+    return hex_char_to_digit_table[static_cast<UInt8>(c)];
+}
+
+/// Converts an unsigned integer in the native endian to hexadecimal representation and back. Used as a base class for HexConversion<T>.
+template <typename TUInt, typename = void>
+struct HexConversionUInt
+{
+    static const constexpr size_t num_hex_digits = sizeof(TUInt) * 2;
+
+    static void hex(TUInt uint_, char * out, std::string_view table)
+    {
+        union
+        {
+            TUInt value;
+            UInt8 uint8[sizeof(TUInt)];
+        };
+
+        value = uint_;
+
+        for (size_t i = 0; i < sizeof(TUInt); ++i)
+        {
+            if constexpr (std::endian::native == std::endian::little)
+                memcpy(out + i * 2, &table[static_cast<size_t>(uint8[sizeof(TUInt) - 1 - i]) * 2], 2);
+            else
+                memcpy(out + i * 2, &table[static_cast<size_t>(uint8[i]) * 2], 2);
+        }
+    }
+
+    static TUInt unhex(const char * data)
+    {
+        TUInt res;
+        if constexpr (sizeof(TUInt) == 1)
+        {
+            res = static_cast<UInt8>(unhexDigit(data[0])) * 0x10 + static_cast<UInt8>(unhexDigit(data[1]));
+        }
+        else if constexpr (sizeof(TUInt) == 2)
+        {
+            res = static_cast<UInt16>(unhexDigit(data[0])) * 0x1000 + static_cast<UInt16>(unhexDigit(data[1])) * 0x100
+                + static_cast<UInt16>(unhexDigit(data[2])) * 0x10 + static_cast<UInt16>(unhexDigit(data[3]));
+        }
+        else if constexpr ((sizeof(TUInt) <= 8) || ((sizeof(TUInt) % 8) != 0))
+        {
+            res = 0;
             for (size_t i = 0; i < sizeof(TUInt) * 2; ++i, ++data)
             {
                 res <<= 4;
-                res += unhex(*data);
+                res += unhexDigit(*data);
             }
         }
         else
         {
+            res = 0;
             for (size_t i = 0; i < sizeof(TUInt) / 8; ++i, data += 16)
             {
                 res <<= 64;
-                res += unhexUInt<UInt64>(data);
+                res += HexConversionUInt<UInt64>::unhex(data);
             }
         }
         return res;
+    }
+};
+
+/// Helper template class to convert a value of any supported type to hexadecimal representation and back.
+template <typename T, typename SFINAE = void>
+struct HexConversion;
+
+template <typename TUInt>
+struct HexConversion<TUInt, std::enable_if_t<std::is_integral_v<TUInt>>> : public HexConversionUInt<TUInt> {};
+
+template <size_t Bits, typename Signed>
+struct HexConversion<wide::integer<Bits, Signed>> : public HexConversionUInt<wide::integer<Bits, Signed>> {};
+
+template <typename CityHashUInt128> /// Partial specialization here allows not to include <city.h> in this header.
+struct HexConversion<CityHashUInt128, std::enable_if_t<std::is_same_v<CityHashUInt128, typename CityHash_v1_0_2::uint128>>>
+{
+    static const constexpr size_t num_hex_digits = 32;
+
+    static void hex(const CityHashUInt128 & uint_, char * out, std::string_view table)
+    {
+        HexConversion<UInt64>::hex(uint_.high64, out, table);
+        HexConversion<UInt64>::hex(uint_.low64, out + 16, table);
+    }
+
+    static CityHashUInt128 unhex(const char * data)
+    {
+        CityHashUInt128 res;
+        res.high64 = HexConversion<UInt64>::unhex(data);
+        res.low64 = HexConversion<UInt64>::unhex(data + 16);
+        return res;
+    }
+};
+}
+
+/// Produces a hexadecimal representation of an integer value with leading zeros (for checksums).
+/// The function supports native integer types, wide::integer, CityHash_v1_0_2::uint128.
+/// It can be used with signed types as well, however they are written as corresponding unsigned numbers
+/// using two's complement (i.e. for example "-1" is written as "0xFF", not as "-0x01").
+template <typename T>
+void writeHexUIntUppercase(const T & value, char * out)
+{
+    impl::HexConversion<T>::hex(value, out, impl::hex_byte_to_char_uppercase_table);
+}
+
+template <typename T>
+void writeHexUIntLowercase(const T & value, char * out)
+{
+    impl::HexConversion<T>::hex(value, out, impl::hex_byte_to_char_lowercase_table);
+}
+
+template <typename T>
+std::string getHexUIntUppercase(const T & value)
+{
+    std::string res(impl::HexConversion<T>::num_hex_digits, '\0');
+    writeHexUIntUppercase(value, res.data());
+    return res;
+}
+
+template <typename T>
+std::string getHexUIntLowercase(const T & value)
+{
+    std::string res(impl::HexConversion<T>::num_hex_digits, '\0');
+    writeHexUIntLowercase(value, res.data());
+    return res;
+}
+
+constexpr char hexDigitUppercase(unsigned char c)
+{
+    return impl::hex_digit_to_char_uppercase_table[c];
+}
+
+constexpr char hexDigitLowercase(unsigned char c)
+{
+    return impl::hex_digit_to_char_lowercase_table[c];
+}
+
+inline void writeHexByteUppercase(UInt8 byte, void * out)
+{
+    memcpy(out, &impl::hex_byte_to_char_uppercase_table[static_cast<size_t>(byte) * 2], 2);
+}
+
+inline void writeHexByteLowercase(UInt8 byte, void * out)
+{
+    memcpy(out, &impl::hex_byte_to_char_lowercase_table[static_cast<size_t>(byte) * 2], 2);
+}
+
+/// Converts a hex representation with leading zeros back to an integer value.
+/// The function supports native integer types, wide::integer, CityHash_v1_0_2::uint128.
+template <typename T>
+constexpr T unhexUInt(const char * data)
+{
+    return impl::HexConversion<T>::unhex(data);
+}
+
+/// Converts a hexadecimal digit '0'..'f' or '0'..'F' to UInt8.
+constexpr UInt8 unhex(char c)
+{
+    return impl::unhexDigit(c);
+}
+
+/// Converts two hexadecimal digits to UInt8.
+constexpr UInt8 unhex2(const char * data)
+{
+    return unhexUInt<UInt8>(data);
+}
+
+/// Converts four hexadecimal digits to UInt16.
+constexpr UInt16 unhex4(const char * data)
+{
+    return unhexUInt<UInt16>(data);
+}
+
+/// Produces a binary representation of a single byte.
+inline void writeBinByte(UInt8 byte, void * out)
+{
+    memcpy(out, &impl::bin_byte_to_char_table[static_cast<size_t>(byte) * 8], 8);
 }
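With this refactor the public entry points (`getHexUIntUppercase`, `unhexUInt`, `writeHexByteLowercase`, ...) dispatch through `impl::HexConversion<T>`, so the same calls also accept `wide::integer` and `CityHash_v1_0_2::uint128`. A minimal usage sketch for a native integer, assuming the refactored header is available as `<base/hex.h>` on the include path:

```cpp
#include <cassert>
#include <cstdint>
#include <string>
#include <base/hex.h>   // assumed include path for the refactored header shown above

int main()
{
    // Round-trip a native integer through the generic helpers.
    uint32_t value = 0xDEADBEEF;
    std::string hex = getHexUIntUppercase(value);        // "DEADBEEF" (leading zeros kept)
    assert(hex == "DEADBEEF");
    assert(unhexUInt<uint32_t>(hex.data()) == value);

    // Single-byte helpers behave as before the refactor.
    char buf[2];
    writeHexByteLowercase(0xAB, buf);                    // buf holds {'a', 'b'}
    assert(buf[0] == 'a' && buf[1] == 'b');
    return 0;
}
```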
@@ -27,6 +27,8 @@ using FromDoubleIntermediateType = long double;
 using FromDoubleIntermediateType = boost::multiprecision::cpp_bin_float_double_extended;
 #endif

+namespace CityHash_v1_0_2 { struct uint128; }
+
 namespace wide
 {

@@ -281,6 +283,17 @@ struct integer<Bits, Signed>::_impl
     }
 }

+template <typename CityHashUInt128 = CityHash_v1_0_2::uint128>
+constexpr static void wide_integer_from_cityhash_uint128(integer<Bits, Signed> & self, const CityHashUInt128 & value) noexcept
+{
+    static_assert(sizeof(item_count) >= 2);
+
+    if constexpr (std::endian::native == std::endian::little)
+        wide_integer_from_tuple_like(self, std::make_pair(value.low64, value.high64));
+    else
+        wide_integer_from_tuple_like(self, std::make_pair(value.high64, value.low64));
+}
+
 /**
  * N.B. t is constructed from double, so max(t) = max(double) ~ 2^310
  * the recursive call happens when t / 2^64 > 2^64, so there won't be more than 5 of them.
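The new `wide_integer_from_cityhash_uint128` path lets a `wide::integer` (for example `UInt128`) be built directly from a CityHash 128-bit result. A sketch of the value being assembled, using a stand-in struct for `CityHash_v1_0_2::uint128` and GCC/Clang's `unsigned __int128` purely for illustration:

```cpp
#include <cstdint>

// Stand-in for CityHash_v1_0_2::uint128: two 64-bit halves.
struct CityUInt128 { uint64_t low64; uint64_t high64; };

// Conceptually the constructor above produces the 128-bit value (high64 << 64) | low64;
// unsigned __int128 is a compiler extension used here only to show the arithmetic.
unsigned __int128 toUInt128(const CityUInt128 & h)
{
    return (static_cast<unsigned __int128>(h.high64) << 64) | h.low64;
}

int main()
{
    CityUInt128 h{0x0123456789abcdefULL, 0xfedcba9876543210ULL};
    unsigned __int128 v = toUInt128(h);
    return static_cast<uint64_t>(v) == h.low64 ? 0 : 1;   // low half survives unchanged
}
```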
@@ -1036,6 +1049,8 @@ constexpr integer<Bits, Signed>::integer(T rhs) noexcept
         _impl::wide_integer_from_wide_integer(*this, rhs);
     else if constexpr (IsTupleLike<T>::value)
         _impl::wide_integer_from_tuple_like(*this, rhs);
+    else if constexpr (std::is_same_v<std::remove_cvref_t<T>, CityHash_v1_0_2::uint128>)
+        _impl::wide_integer_from_cityhash_uint128(*this, rhs);
     else
         _impl::wide_integer_from_builtin(*this, rhs);
 }
@@ -1051,6 +1066,8 @@ constexpr integer<Bits, Signed>::integer(std::initializer_list<T> il) noexcept
         _impl::wide_integer_from_wide_integer(*this, *il.begin());
     else if constexpr (IsTupleLike<T>::value)
         _impl::wide_integer_from_tuple_like(*this, *il.begin());
+    else if constexpr (std::is_same_v<std::remove_cvref_t<T>, CityHash_v1_0_2::uint128>)
+        _impl::wide_integer_from_cityhash_uint128(*this, *il.begin());
     else
         _impl::wide_integer_from_builtin(*this, *il.begin());
 }
@@ -1088,6 +1105,8 @@ constexpr integer<Bits, Signed> & integer<Bits, Signed>::operator=(T rhs) noexce
 {
     if constexpr (IsTupleLike<T>::value)
         _impl::wide_integer_from_tuple_like(*this, rhs);
+    else if constexpr (std::is_same_v<std::remove_cvref_t<T>, CityHash_v1_0_2::uint128>)
+        _impl::wide_integer_from_cityhash_uint128(*this, rhs);
     else
         _impl::wide_integer_from_builtin(*this, rhs);
     return *this;
@@ -87,7 +87,6 @@ set (SRCS
     src/LoggingRegistry.cpp
    src/LogStream.cpp
    src/MD5Engine.cpp
-    src/MemoryPool.cpp
    src/MemoryStream.cpp
    src/Message.cpp
    src/Mutex.cpp
@@ -1,116 +0,0 @@
-//
-// MemoryPool.h
-//
-// Library: Foundation
-// Package: Core
-// Module:  MemoryPool
-//
-// Definition of the MemoryPool class.
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#ifndef Foundation_MemoryPool_INCLUDED
-#define Foundation_MemoryPool_INCLUDED
-
-
-#include <cstddef>
-#include <vector>
-#include "Poco/Foundation.h"
-#include "Poco/Mutex.h"
-
-
-namespace Poco
-{
-
-
-class Foundation_API MemoryPool
-/// A simple pool for fixed-size memory blocks.
-///
-/// The main purpose of this class is to speed-up
-/// memory allocations, as well as to reduce memory
-/// fragmentation in situations where the same blocks
-/// are allocated all over again, such as in server
-/// applications.
-///
-/// All allocated blocks are retained for future use.
-/// A limit on the number of blocks can be specified.
-/// Blocks can be preallocated.
-{
-public:
-    MemoryPool(std::size_t blockSize, int preAlloc = 0, int maxAlloc = 0);
-    /// Creates a MemoryPool for blocks with the given blockSize.
-    /// The number of blocks given in preAlloc are preallocated.
-
-    ~MemoryPool();
-
-    void * get();
-    /// Returns a memory block. If there are no more blocks
-    /// in the pool, a new block will be allocated.
-    ///
-    /// If maxAlloc blocks are already allocated, an
-    /// OutOfMemoryException is thrown.
-
-    void release(void * ptr);
-    /// Releases a memory block and returns it to the pool.
-
-    std::size_t blockSize() const;
-    /// Returns the block size.
-
-    int allocated() const;
-    /// Returns the number of allocated blocks.
-
-    int available() const;
-    /// Returns the number of available blocks in the pool.
-
-private:
-    MemoryPool();
-    MemoryPool(const MemoryPool &);
-    MemoryPool & operator=(const MemoryPool &);
-
-    void clear();
-
-    enum
-    {
-        BLOCK_RESERVE = 128
-    };
-
-    typedef std::vector<char *> BlockVec;
-
-    std::size_t _blockSize;
-    int _maxAlloc;
-    int _allocated;
-    BlockVec _blocks;
-    FastMutex _mutex;
-};
-
-
-//
-// inlines
-//
-inline std::size_t MemoryPool::blockSize() const
-{
-    return _blockSize;
-}
-
-
-inline int MemoryPool::allocated() const
-{
-    return _allocated;
-}
-
-
-inline int MemoryPool::available() const
-{
-    return (int)_blocks.size();
-}
-
-
-} // namespace Poco
-
-
-#endif // Foundation_MemoryPool_INCLUDED
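For context on what is being dropped: `Poco::MemoryPool` hands out fixed-size blocks and keeps released blocks for reuse, as its doc comments above describe. A rough, self-contained sketch of that pooling idea (illustrative only, since this commit deletes the class; the real implementation also guards `get`/`release` with a mutex and enforces `maxAlloc`):

```cpp
#include <cstddef>
#include <vector>

// Tiny sketch of a fixed-size block pool: released blocks are kept and
// handed out again instead of going back to the allocator.
class BlockPool
{
public:
    explicit BlockPool(std::size_t block_size) : block_size_(block_size) {}
    ~BlockPool() { for (char * b : free_blocks_) delete[] b; }

    char * get()
    {
        if (free_blocks_.empty())
            return new char[block_size_];    // pool empty: allocate a fresh block
        char * b = free_blocks_.back();
        free_blocks_.pop_back();
        return b;                            // otherwise reuse a pooled block
    }

    void release(char * block) { free_blocks_.push_back(block); }

private:
    std::size_t block_size_;
    std::vector<char *> free_blocks_;
};

int main()
{
    BlockPool pool(128 * 1024);       // HTTPBufferAllocator used 128 KiB blocks
    char * buf = pool.get();
    pool.release(buf);                // retained for reuse, not freed
    char * again = pool.get();        // the same block comes back
    bool reused = (again == buf);
    pool.release(again);
    return reused ? 0 : 1;
}
```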
@@ -1,105 +0,0 @@
-//
-// MemoryPool.cpp
-//
-// Library: Foundation
-// Package: Core
-// Module:  MemoryPool
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#include "Poco/MemoryPool.h"
-#include "Poco/Exception.h"
-
-
-namespace Poco {
-
-
-MemoryPool::MemoryPool(std::size_t blockSize, int preAlloc, int maxAlloc):
-    _blockSize(blockSize),
-    _maxAlloc(maxAlloc),
-    _allocated(preAlloc)
-{
-    poco_assert (maxAlloc == 0 || maxAlloc >= preAlloc);
-    poco_assert (preAlloc >= 0 && maxAlloc >= 0);
-
-    int r = BLOCK_RESERVE;
-    if (preAlloc > r)
-        r = preAlloc;
-    if (maxAlloc > 0 && maxAlloc < r)
-        r = maxAlloc;
-    _blocks.reserve(r);
-
-    try
-    {
-        for (int i = 0; i < preAlloc; ++i)
-        {
-            _blocks.push_back(new char[_blockSize]);
-        }
-    }
-    catch (...)
-    {
-        clear();
-        throw;
-    }
-}
-
-
-MemoryPool::~MemoryPool()
-{
-    clear();
-}
-
-
-void MemoryPool::clear()
-{
-    for (BlockVec::iterator it = _blocks.begin(); it != _blocks.end(); ++it)
-    {
-        delete [] *it;
-    }
-    _blocks.clear();
-}
-
-
-void* MemoryPool::get()
-{
-    FastMutex::ScopedLock lock(_mutex);
-
-    if (_blocks.empty())
-    {
-        if (_maxAlloc == 0 || _allocated < _maxAlloc)
-        {
-            ++_allocated;
-            return new char[_blockSize];
-        }
-        else throw OutOfMemoryException("MemoryPool exhausted");
-    }
-    else
-    {
-        char* ptr = _blocks.back();
-        _blocks.pop_back();
-        return ptr;
-    }
-}
-
-
-void MemoryPool::release(void* ptr)
-{
-    FastMutex::ScopedLock lock(_mutex);
-
-    try
-    {
-        _blocks.push_back(reinterpret_cast<char*>(ptr));
-    }
-    catch (...)
-    {
-        delete [] reinterpret_cast<char*>(ptr);
-    }
-}
-
-
-} // namespace Poco
@@ -19,7 +19,6 @@


 #include "Poco/BufferedStreamBuf.h"
-#include "Poco/Net/HTTPBufferAllocator.h"
 #include "Poco/Net/Net.h"

@@ -27,9 +26,9 @@ namespace Poco
 {
 namespace Net
 {
-    typedef Poco::BasicBufferedStreamBuf<char, std::char_traits<char>, HTTPBufferAllocator> HTTPBasicStreamBuf;
+    constexpr size_t HTTP_DEFAULT_BUFFER_SIZE = 8 * 1024;
+
+    typedef Poco::BasicBufferedStreamBuf<char, std::char_traits<char>> HTTPBasicStreamBuf;


 }
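After this change the HTTP stream buffer uses the default allocator with a fixed `HTTP_DEFAULT_BUFFER_SIZE` of 8 KiB, instead of 128 KiB blocks drawn from the `MemoryPool`-backed `HTTPBufferAllocator`. A sketch of the resulting setup, with a plain heap-allocated buffer standing in for `BasicBufferedStreamBuf`:

```cpp
#include <cstddef>
#include <vector>

// Mirrors the new constant: an ordinary 8 KiB heap allocation per stream
// replaces the pooled 128 KiB blocks.
constexpr std::size_t HTTP_DEFAULT_BUFFER_SIZE = 8 * 1024;

struct HttpStreamBufferSketch
{
    std::vector<char> buffer = std::vector<char>(HTTP_DEFAULT_BUFFER_SIZE);
};

int main()
{
    HttpStreamBufferSketch buf;    // allocated and freed by the default allocator
    return buf.buffer.size() == HTTP_DEFAULT_BUFFER_SIZE ? 0 : 1;
}
```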
@@ -1,53 +0,0 @@
-//
-// HTTPBufferAllocator.h
-//
-// Library: Net
-// Package: HTTP
-// Module:  HTTPBufferAllocator
-//
-// Definition of the HTTPBufferAllocator class.
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#ifndef Net_HTTPBufferAllocator_INCLUDED
-#define Net_HTTPBufferAllocator_INCLUDED
-
-
-#include <ios>
-#include "Poco/MemoryPool.h"
-#include "Poco/Net/Net.h"
-
-
-namespace Poco
-{
-namespace Net
-{
-
-
-    class Net_API HTTPBufferAllocator
-    /// A BufferAllocator for HTTP streams.
-    {
-    public:
-        static char * allocate(std::streamsize size);
-        static void deallocate(char * ptr, std::streamsize size);
-
-        enum
-        {
-            BUFFER_SIZE = 128 * 1024
-        };
-
-    private:
-        static Poco::MemoryPool _pool;
-    };
-
-
-}
-} // namespace Poco::Net
-
-
-#endif // Net_HTTPBufferAllocator_INCLUDED
@@ -21,7 +21,6 @@
 #include <cstddef>
 #include <istream>
 #include <ostream>
-#include "Poco/MemoryPool.h"
 #include "Poco/Net/HTTPBasicStreamBuf.h"
 #include "Poco/Net/Net.h"

@@ -80,12 +79,6 @@ namespace Net
     public:
         HTTPChunkedInputStream(HTTPSession & session);
         ~HTTPChunkedInputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };


@@ -95,12 +88,6 @@ namespace Net
     public:
         HTTPChunkedOutputStream(HTTPSession & session);
        ~HTTPChunkedOutputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };

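The operator new/operator delete declarations removed above (and in the sibling stream headers below) follow a common idiom: a class-scoped allocation operator backed by a MemoryPool whose block size is the object size, so stream instances bypass the general-purpose heap. A condensed sketch of that idiom with a hypothetical class name; after this change the classes simply fall back to the global operator new/delete:

```cpp
// Sketch of the pattern behind the removed declarations (hypothetical class name,
// assumes an upstream Poco installation). Every `new PooledObject` is carved out of
// the static pool; `delete` returns the block to the pool instead of freeing it.
#include <cstddef>
#include "Poco/MemoryPool.h"

class PooledObject
{
public:
    void* operator new(std::size_t) { return _pool.get(); }
    void operator delete(void* ptr) { _pool.release(ptr); }

private:
    static Poco::MemoryPool _pool;
};

// One pool per class, with blocks sized for exactly one object.
Poco::MemoryPool PooledObject::_pool(sizeof(PooledObject));
```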
@@ -78,12 +78,6 @@ namespace Net
     public:
         HTTPFixedLengthInputStream(HTTPSession & session, HTTPFixedLengthStreamBuf::ContentLength length);
         ~HTTPFixedLengthInputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };


@@ -93,12 +87,6 @@ namespace Net
     public:
         HTTPFixedLengthOutputStream(HTTPSession & session, HTTPFixedLengthStreamBuf::ContentLength length);
         ~HTTPFixedLengthOutputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };

@@ -21,7 +21,6 @@
 #include <cstddef>
 #include <istream>
 #include <ostream>
-#include "Poco/MemoryPool.h"
 #include "Poco/Net/HTTPBasicStreamBuf.h"
 #include "Poco/Net/Net.h"

@@ -74,12 +73,6 @@ namespace Net
     public:
         HTTPHeaderInputStream(HTTPSession & session);
         ~HTTPHeaderInputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };


@@ -89,12 +82,6 @@ namespace Net
     public:
         HTTPHeaderOutputStream(HTTPSession & session);
         ~HTTPHeaderOutputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };

@@ -192,7 +192,7 @@
     HTTPSession & operator=(const HTTPSession &);

     StreamSocket _socket;
-    char * _pBuffer;
+    std::unique_ptr<char[]> _pBuffer;
     char * _pCurrent;
     char * _pEnd;
     bool _keepAlive;
@@ -21,7 +21,6 @@
 #include <cstddef>
 #include <istream>
 #include <ostream>
-#include "Poco/MemoryPool.h"
 #include "Poco/Net/HTTPBasicStreamBuf.h"
 #include "Poco/Net/Net.h"

@@ -75,12 +74,6 @@ namespace Net
     public:
         HTTPInputStream(HTTPSession & session);
         ~HTTPInputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };


@@ -90,12 +83,6 @@ namespace Net
     public:
         HTTPOutputStream(HTTPSession & session);
         ~HTTPOutputStream();
-
-        void * operator new(std::size_t size);
-        void operator delete(void * ptr);
-
-    private:
-        static Poco::MemoryPool _pool;
     };

@@ -1,44 +0,0 @@
-//
-// HTTPBufferAllocator.cpp
-//
-// Library: Net
-// Package: HTTP
-// Module: HTTPBufferAllocator
-//
-// Copyright (c) 2005-2006, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#include "Poco/Net/HTTPBufferAllocator.h"
-
-
-using Poco::MemoryPool;
-
-
-namespace Poco {
-namespace Net {
-
-
-MemoryPool HTTPBufferAllocator::_pool(HTTPBufferAllocator::BUFFER_SIZE, 16);
-
-
-char* HTTPBufferAllocator::allocate(std::streamsize size)
-{
-    poco_assert_dbg (size == BUFFER_SIZE);
-
-    return reinterpret_cast<char*>(_pool.get());
-}
-
-
-void HTTPBufferAllocator::deallocate(char* ptr, std::streamsize size)
-{
-    poco_assert_dbg (size == BUFFER_SIZE);
-
-    _pool.release(ptr);
-}
-
-
-} } // namespace Poco::Net
@@ -34,7 +34,7 @@ namespace Net {


 HTTPChunkedStreamBuf::HTTPChunkedStreamBuf(HTTPSession& session, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _mode(mode),
     _chunk(0)
@@ -181,10 +181,6 @@ HTTPChunkedStreamBuf* HTTPChunkedIOS::rdbuf()
 // HTTPChunkedInputStream
 //


-Poco::MemoryPool HTTPChunkedInputStream::_pool(sizeof(HTTPChunkedInputStream));
-
-
 HTTPChunkedInputStream::HTTPChunkedInputStream(HTTPSession& session):
     HTTPChunkedIOS(session, std::ios::in),
     std::istream(&_buf)
@@ -196,34 +192,10 @@ HTTPChunkedInputStream::~HTTPChunkedInputStream()
 {
 }


-void* HTTPChunkedInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPChunkedInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPChunkedOutputStream
 //


-Poco::MemoryPool HTTPChunkedOutputStream::_pool(sizeof(HTTPChunkedOutputStream));
-
-
 HTTPChunkedOutputStream::HTTPChunkedOutputStream(HTTPSession& session):
     HTTPChunkedIOS(session, std::ios::out),
     std::ostream(&_buf)
@@ -235,24 +207,4 @@ HTTPChunkedOutputStream::~HTTPChunkedOutputStream()
 {
 }


-void* HTTPChunkedOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPChunkedOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -30,7 +30,7 @@ namespace Net {


 HTTPFixedLengthStreamBuf::HTTPFixedLengthStreamBuf(HTTPSession& session, ContentLength length, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _length(length),
     _count(0)
@@ -109,9 +109,6 @@ HTTPFixedLengthStreamBuf* HTTPFixedLengthIOS::rdbuf()
 //


-Poco::MemoryPool HTTPFixedLengthInputStream::_pool(sizeof(HTTPFixedLengthInputStream));
-
-
 HTTPFixedLengthInputStream::HTTPFixedLengthInputStream(HTTPSession& session, HTTPFixedLengthStreamBuf::ContentLength length):
     HTTPFixedLengthIOS(session, length, std::ios::in),
     std::istream(&_buf)
@@ -124,33 +121,10 @@ HTTPFixedLengthInputStream::~HTTPFixedLengthInputStream()
 }


-void* HTTPFixedLengthInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPFixedLengthInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPFixedLengthOutputStream
 //


-Poco::MemoryPool HTTPFixedLengthOutputStream::_pool(sizeof(HTTPFixedLengthOutputStream));
-
-
 HTTPFixedLengthOutputStream::HTTPFixedLengthOutputStream(HTTPSession& session, HTTPFixedLengthStreamBuf::ContentLength length):
     HTTPFixedLengthIOS(session, length, std::ios::out),
     std::ostream(&_buf)
@@ -163,23 +137,4 @@ HTTPFixedLengthOutputStream::~HTTPFixedLengthOutputStream()
 }


-void* HTTPFixedLengthOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPFixedLengthOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -26,7 +26,7 @@ namespace Net {


 HTTPHeaderStreamBuf::HTTPHeaderStreamBuf(HTTPSession& session, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _end(false)
 {
@@ -101,10 +101,6 @@ HTTPHeaderStreamBuf* HTTPHeaderIOS::rdbuf()
 // HTTPHeaderInputStream
 //


-Poco::MemoryPool HTTPHeaderInputStream::_pool(sizeof(HTTPHeaderInputStream));
-
-
 HTTPHeaderInputStream::HTTPHeaderInputStream(HTTPSession& session):
     HTTPHeaderIOS(session, std::ios::in),
     std::istream(&_buf)
@@ -116,34 +112,10 @@ HTTPHeaderInputStream::~HTTPHeaderInputStream()
 {
 }


-void* HTTPHeaderInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPHeaderInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPHeaderOutputStream
 //


-Poco::MemoryPool HTTPHeaderOutputStream::_pool(sizeof(HTTPHeaderOutputStream));
-
-
 HTTPHeaderOutputStream::HTTPHeaderOutputStream(HTTPSession& session):
     HTTPHeaderIOS(session, std::ios::out),
     std::ostream(&_buf)
@@ -155,24 +127,4 @@ HTTPHeaderOutputStream::~HTTPHeaderOutputStream()
 {
 }


-void* HTTPHeaderOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPHeaderOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -13,8 +13,8 @@


 #include "Poco/Net/HTTPSession.h"
-#include "Poco/Net/HTTPBufferAllocator.h"
 #include "Poco/Net/NetException.h"
+#include "Poco/Net/HTTPBasicStreamBuf.h"
 #include <cstring>


@@ -68,14 +68,6 @@ HTTPSession::HTTPSession(const StreamSocket& socket, bool keepAlive):

 HTTPSession::~HTTPSession()
 {
-    try
-    {
-        if (_pBuffer) HTTPBufferAllocator::deallocate(_pBuffer, HTTPBufferAllocator::BUFFER_SIZE);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
     try
     {
         close();
@@ -177,10 +169,10 @@ void HTTPSession::refill()
 {
     if (!_pBuffer)
     {
-        _pBuffer = HTTPBufferAllocator::allocate(HTTPBufferAllocator::BUFFER_SIZE);
+        _pBuffer = std::make_unique<char[]>(HTTP_DEFAULT_BUFFER_SIZE);
     }
-    _pCurrent = _pEnd = _pBuffer;
+    _pCurrent = _pEnd = _pBuffer.get();
-    int n = receive(_pBuffer, HTTPBufferAllocator::BUFFER_SIZE);
+    int n = receive(_pBuffer.get(), HTTP_DEFAULT_BUFFER_SIZE);
     _pEnd += n;
 }

@@ -199,7 +191,7 @@ void HTTPSession::connect(const SocketAddress& address)
     _socket.setNoDelay(true);
     // There may be leftover data from a previous (failed) request in the buffer,
     // so we clear it.
-    _pCurrent = _pEnd = _pBuffer;
+    _pCurrent = _pEnd = _pBuffer.get();
 }

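The HTTPSession hunks above replace the pool-allocated raw buffer with a lazily created std::unique_ptr<char[]>, which is why the explicit deallocate call disappears from the destructor. A reduced sketch of the resulting ownership pattern (class trimmed to the buffer-related members; the socket read is elided):

```cpp
// Minimal sketch of the buffer handling the session switches to: a lazily
// allocated unique_ptr buffer that is freed automatically on destruction.
// Names mirror the diff; this is not the full HTTPSession class.
#include <cstddef>
#include <memory>

constexpr std::size_t HTTP_DEFAULT_BUFFER_SIZE = 8 * 1024;

class Session
{
public:
    void refill()
    {
        if (!_pBuffer)
            _pBuffer = std::make_unique<char[]>(HTTP_DEFAULT_BUFFER_SIZE);
        _pCurrent = _pEnd = _pBuffer.get();
        // ... read from the socket into _pBuffer.get(), then advance _pEnd ...
    }

private:
    std::unique_ptr<char[]> _pBuffer;   // released automatically when the session dies
    char* _pCurrent = nullptr;
    char* _pEnd = nullptr;
};
```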
@@ -26,7 +26,7 @@ namespace Net {


 HTTPStreamBuf::HTTPStreamBuf(HTTPSession& session, openmode mode):
-    HTTPBasicStreamBuf(HTTPBufferAllocator::BUFFER_SIZE, mode),
+    HTTPBasicStreamBuf(HTTP_DEFAULT_BUFFER_SIZE, mode),
     _session(session),
     _mode(mode)
 {
@@ -96,10 +96,6 @@ HTTPStreamBuf* HTTPIOS::rdbuf()
 // HTTPInputStream
 //


-Poco::MemoryPool HTTPInputStream::_pool(sizeof(HTTPInputStream));
-
-
 HTTPInputStream::HTTPInputStream(HTTPSession& session):
     HTTPIOS(session, std::ios::in),
     std::istream(&_buf)
@@ -112,33 +108,11 @@ HTTPInputStream::~HTTPInputStream()
 }


-void* HTTPInputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPInputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 //
 // HTTPOutputStream
 //


-Poco::MemoryPool HTTPOutputStream::_pool(sizeof(HTTPOutputStream));
-
-
 HTTPOutputStream::HTTPOutputStream(HTTPSession& session):
     HTTPIOS(session, std::ios::out),
     std::ostream(&_buf)
@@ -150,24 +124,4 @@ HTTPOutputStream::~HTTPOutputStream()
 {
 }


-void* HTTPOutputStream::operator new(std::size_t size)
-{
-    return _pool.get();
-}
-
-
-void HTTPOutputStream::operator delete(void* ptr)
-{
-    try
-    {
-        _pool.release(ptr);
-    }
-    catch (...)
-    {
-        poco_unexpected();
-    }
-}
-
-
 } } // namespace Poco::Net
@@ -1,53 +0,0 @@
-//
-// ConsoleCertificateHandler.h
-//
-// Library: NetSSL_OpenSSL
-// Package: SSLCore
-// Module: ConsoleCertificateHandler
-//
-// Definition of the ConsoleCertificateHandler class.
-//
-// Copyright (c) 2006-2009, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#ifndef NetSSL_ConsoleCertificateHandler_INCLUDED
-#define NetSSL_ConsoleCertificateHandler_INCLUDED
-
-
-#include "Poco/Net/InvalidCertificateHandler.h"
-#include "Poco/Net/NetSSL.h"
-
-
-namespace Poco
-{
-namespace Net
-{
-
-
-    class NetSSL_API ConsoleCertificateHandler : public InvalidCertificateHandler
-    /// A ConsoleCertificateHandler is invoked whenever an error occurs verifying the certificate.
-    ///
-    /// The certificate is printed to stdout and the user is asked via console if he wants to accept it.
-    {
-    public:
-        ConsoleCertificateHandler(bool handleErrorsOnServerSide);
-        /// Creates the ConsoleCertificateHandler.
-
-        virtual ~ConsoleCertificateHandler();
-        /// Destroys the ConsoleCertificateHandler.
-
-        void onInvalidCertificate(const void * pSender, VerificationErrorArgs & errorCert);
-        /// Prints the certificate to stdout and waits for user input on the console
-        /// to decide if a certificate should be accepted/rejected.
-    };
-
-
-}
-} // namespace Poco::Net
-
-
-#endif // NetSSL_ConsoleCertificateHandler_INCLUDED
@@ -85,7 +85,7 @@ namespace Net
     /// </options>
     /// </privateKeyPassphraseHandler>
     /// <invalidCertificateHandler>
-    ///     <name>ConsoleCertificateHandler</name>
+    ///     <name>RejectCertificateHandler</name>
     /// </invalidCertificateHandler>
     /// <cacheSessions>true|false</cacheSessions>
     /// <sessionIdContext>someString</sessionIdContext> <!-- server only -->
@@ -186,7 +186,7 @@ namespace Net
     ///
     /// Valid initialization code would be:
     ///     SharedPtr<PrivateKeyPassphraseHandler> pConsoleHandler = new KeyConsoleHandler;
-    ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new ConsoleCertificateHandler;
+    ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new RejectCertificateHandler;
     ///     Context::Ptr pContext = new Context(Context::SERVER_USE, "any.pem", "any.pem", "rootcert.pem", Context::VERIFY_RELAXED, 9, false, "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
     ///     SSLManager::instance().initializeServer(pConsoleHandler, pInvalidCertHandler, pContext);

@@ -203,7 +203,7 @@ namespace Net
     ///
     /// Valid initialization code would be:
     ///     SharedPtr<PrivateKeyPassphraseHandler> pConsoleHandler = new KeyConsoleHandler;
-    ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new ConsoleCertificateHandler;
+    ///     SharedPtr<InvalidCertificateHandler> pInvalidCertHandler = new RejectCertificateHandler;
     ///     Context::Ptr pContext = new Context(Context::CLIENT_USE, "", "", "rootcert.pem", Context::VERIFY_RELAXED, 9, false, "ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH");
     ///     SSLManager::instance().initializeClient(pConsoleHandler, pInvalidCertHandler, pContext);

@@ -13,7 +13,6 @@


 #include "Poco/Net/CertificateHandlerFactoryMgr.h"
-#include "Poco/Net/ConsoleCertificateHandler.h"
 #include "Poco/Net/AcceptCertificateHandler.h"
 #include "Poco/Net/RejectCertificateHandler.h"

@@ -24,7 +23,6 @@ namespace Net {

 CertificateHandlerFactoryMgr::CertificateHandlerFactoryMgr()
 {
-    setFactory("ConsoleCertificateHandler", new CertificateHandlerFactoryImpl<ConsoleCertificateHandler>());
     setFactory("AcceptCertificateHandler", new CertificateHandlerFactoryImpl<AcceptCertificateHandler>());
     setFactory("RejectCertificateHandler", new CertificateHandlerFactoryImpl<RejectCertificateHandler>());
 }
@@ -1,53 +0,0 @@
-//
-// ConsoleCertificateHandler.cpp
-//
-// Library: NetSSL_OpenSSL
-// Package: SSLCore
-// Module: ConsoleCertificateHandler
-//
-// Copyright (c) 2006-2009, Applied Informatics Software Engineering GmbH.
-// and Contributors.
-//
-// SPDX-License-Identifier: BSL-1.0
-//
-
-
-#include "Poco/Net/ConsoleCertificateHandler.h"
-#include <iostream>
-
-
-namespace Poco {
-namespace Net {
-
-
-ConsoleCertificateHandler::ConsoleCertificateHandler(bool server): InvalidCertificateHandler(server)
-{
-}
-
-
-ConsoleCertificateHandler::~ConsoleCertificateHandler()
-{
-}
-
-
-void ConsoleCertificateHandler::onInvalidCertificate(const void*, VerificationErrorArgs& errorCert)
-{
-    const X509Certificate& aCert = errorCert.certificate();
-    std::cout << "\n";
-    std::cout << "WARNING: Certificate verification failed\n";
-    std::cout << "----------------------------------------\n";
-    std::cout << "Issuer Name: " << aCert.issuerName() << "\n";
-    std::cout << "Subject Name: " << aCert.subjectName() << "\n\n";
-    std::cout << "The certificate yielded the error: " << errorCert.errorMessage() << "\n\n";
-    std::cout << "The error occurred in the certificate chain at position " << errorCert.errorDepth() << "\n";
-    std::cout << "Accept the certificate (y,n)? ";
-    char c = 0;
-    std::cin >> c;
-    if (c == 'y' || c == 'Y')
-        errorCert.setIgnoreError(true);
-    else
-        errorCert.setIgnoreError(false);
-}
-
-
-} } // namespace Poco::Net
@@ -46,7 +46,7 @@ const std::string SSLManager::CFG_PREFER_SERVER_CIPHERS("preferServerCiphers");
 const std::string SSLManager::CFG_DELEGATE_HANDLER("privateKeyPassphraseHandler.name");
 const std::string SSLManager::VAL_DELEGATE_HANDLER("KeyConsoleHandler");
 const std::string SSLManager::CFG_CERTIFICATE_HANDLER("invalidCertificateHandler.name");
-const std::string SSLManager::VAL_CERTIFICATE_HANDLER("ConsoleCertificateHandler");
+const std::string SSLManager::VAL_CERTIFICATE_HANDLER("RejectCertificateHandler");
 const std::string SSLManager::CFG_SERVER_PREFIX("openSSL.server.");
 const std::string SSLManager::CFG_CLIENT_PREFIX("openSSL.client.");
 const std::string SSLManager::CFG_CACHE_SESSIONS("cacheSessions");

@@ -2,11 +2,11 @@

 # NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
 # only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
-SET(VERSION_REVISION 54475)
+SET(VERSION_REVISION 54476)
 SET(VERSION_MAJOR 23)
-SET(VERSION_MINOR 6)
+SET(VERSION_MINOR 7)
 SET(VERSION_PATCH 1)
-SET(VERSION_GITHASH 2fec796e73efda10a538a03af3205ce8ffa1b2de)
+SET(VERSION_GITHASH d1c7e13d08868cb04d3562dcced704dd577cb1df)
-SET(VERSION_DESCRIBE v23.6.1.1-testing)
+SET(VERSION_DESCRIBE v23.7.1.1-testing)
-SET(VERSION_STRING 23.6.1.1)
+SET(VERSION_STRING 23.7.1.1)
 # end of autochange
@@ -17,3 +17,17 @@ get_target_property(FLAT_HASH_SET_INCLUDE_DIR absl::flat_hash_set INTERFACE_INCL
 target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_SET_INCLUDE_DIR})

 add_library(ch_contrib::abseil_swiss_tables ALIAS _abseil_swiss_tables)
+
+set(ABSL_FORMAT_SRC
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/arg.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/bind.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/extension.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/float_conversion.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/output.cc
+    ${ABSL_ROOT_DIR}/absl/strings/internal/str_format/parser.cc
+)
+
+add_library(_abseil_str_format ${ABSL_FORMAT_SRC})
+target_include_directories(_abseil_str_format PUBLIC ${ABSL_ROOT_DIR})
+
+add_library(ch_contrib::abseil_str_format ALIAS _abseil_str_format)
@@ -31,12 +31,12 @@ endif()

 set (CMAKE_CXX_STANDARD 17)

-set(ARROW_VERSION "6.0.1")
+set(ARROW_VERSION "11.0.0")
 string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" ARROW_BASE_VERSION "${ARROW_VERSION}")

-set(ARROW_VERSION_MAJOR "6")
+set(ARROW_VERSION_MAJOR "11")
 set(ARROW_VERSION_MINOR "0")
-set(ARROW_VERSION_PATCH "1")
+set(ARROW_VERSION_PATCH "0")

 if(ARROW_VERSION_MAJOR STREQUAL "0")
     # Arrow 0.x.y => SO version is "x", full SO version is "x.y.0"
@@ -514,6 +514,10 @@ if (SANITIZE STREQUAL "undefined")
     target_compile_options(_arrow PRIVATE -fno-sanitize=undefined)
 endif ()

+# Define Thrift version for parquet (we use 0.16.0)
+add_definitions(-DPARQUET_THRIFT_VERSION_MAJOR=0)
+add_definitions(-DPARQUET_THRIFT_VERSION_MINOR=16)
+
 # === tools

 set(TOOLS_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/tools/parquet")
@@ -1,6 +1,6 @@
 option (ENABLE_AZURE_BLOB_STORAGE "Enable Azure blob storage" ${ENABLE_LIBRARIES})

-if (NOT ENABLE_AZURE_BLOB_STORAGE OR BUILD_STANDALONE_KEEPER OR OS_FREEBSD)
+if (NOT ENABLE_AZURE_BLOB_STORAGE OR OS_FREEBSD)
    message(STATUS "Not using Azure blob storage")
    return()
 endif()
@@ -61,11 +61,24 @@ namespace CityHash_v1_0_2
 typedef uint8_t uint8;
 typedef uint32_t uint32;
 typedef uint64_t uint64;
-typedef std::pair<uint64, uint64> uint128;

+/// Represent an unsigned integer of 128 bits as it's used in CityHash.
+/// Originally CityHash used `std::pair<uint64, uint64>` instead of this struct,
+/// however the members `first` and `second` could be easily confused so they were renamed to `low64` and `high64`:
+/// `first` -> `low64`, `second` -> `high64`.
+struct uint128
+{
+    uint64 low64 = 0;
+    uint64 high64 = 0;
+
+    uint128() = default;
+    uint128(uint64 low64_, uint64 high64_) : low64(low64_), high64(high64_) {}
+    friend bool operator ==(const uint128 & x, const uint128 & y) { return (x.low64 == y.low64) && (x.high64 == y.high64); }
+    friend bool operator !=(const uint128 & x, const uint128 & y) { return !(x == y); }
+};

-inline uint64 Uint128Low64(const uint128& x) { return x.first; }
-inline uint64 Uint128High64(const uint128& x) { return x.second; }
+inline uint64 Uint128Low64(const uint128 & x) { return x.low64; }
+inline uint64 Uint128High64(const uint128 & x) { return x.high64; }

 // Hash function for a byte array.
 uint64 CityHash64(const char *buf, size_t len);
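For callers of this header, the practical effect of the uint128 change is a member rename: code that used the std::pair members first/second has to switch to low64/high64, while the Uint128Low64/Uint128High64 helpers keep their spelling. A small sketch of a hypothetical caller (assuming the usual CityHash128 entry point from the same header):

```cpp
// Sketch of adapting caller code to the new uint128 struct; the function name,
// header path, and caller are assumptions for illustration only.
#include <cstddef>
#include <cstdint>
#include <city.h>

void example(const char* buf, size_t len)
{
    CityHash_v1_0_2::uint128 h = CityHash_v1_0_2::CityHash128(buf, len);

    // Before the change: h.first / h.second. After: the renamed members...
    uint64_t lo = h.low64;
    uint64_t hi = h.high64;

    // ...or the accessors, whose spelling did not change.
    lo = CityHash_v1_0_2::Uint128Low64(h);
    hi = CityHash_v1_0_2::Uint128High64(h);
    (void)lo; (void)hi;
}
```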
contrib/libhdfs3 (vendored)
@@ -1 +1 @@
-Subproject commit 164b89253fad7991bce77882f01b51ab81d19f3d
+Subproject commit 377220ef351ae24994a5fcd2b5fa3930d00c4db0

contrib/re2 (vendored)
@@ -1 +1 @@
-Subproject commit 13ebb377c6ad763ca61d12dd6f88b1126bd0b911
+Subproject commit 03da4fc0857c285e3a26782f6bc8931c4c950df4
@@ -12,6 +12,7 @@ endif()
 set(SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2")

 set(RE2_SOURCES
+    ${SRC_DIR}/re2/bitmap256.cc
     ${SRC_DIR}/re2/bitstate.cc
     ${SRC_DIR}/re2/compile.cc
     ${SRC_DIR}/re2/dfa.cc
@@ -28,15 +29,16 @@ set(RE2_SOURCES
     ${SRC_DIR}/re2/regexp.cc
     ${SRC_DIR}/re2/set.cc
     ${SRC_DIR}/re2/simplify.cc
-    ${SRC_DIR}/re2/stringpiece.cc
     ${SRC_DIR}/re2/tostring.cc
     ${SRC_DIR}/re2/unicode_casefold.cc
     ${SRC_DIR}/re2/unicode_groups.cc
+    ${SRC_DIR}/util/pcre.cc
     ${SRC_DIR}/util/rune.cc
     ${SRC_DIR}/util/strutil.cc
 )
 add_library(re2 ${RE2_SOURCES})
 target_include_directories(re2 PUBLIC "${SRC_DIR}")
+target_link_libraries(re2 ch_contrib::abseil_str_format)

 # Building re2 which is thread-safe and re2_st which is not.
 # re2 changes its state during matching of regular expression, e.g. creates temporary DFA.
@@ -48,6 +50,7 @@ target_compile_definitions (re2_st PRIVATE NDEBUG NO_THREADS re2=re2_st)
 target_include_directories (re2_st PRIVATE .)
 target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR})
 target_include_directories (re2_st SYSTEM BEFORE PUBLIC ${SRC_DIR})
+target_link_libraries (re2_st ch_contrib::abseil_str_format)

 file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/re2_st)
 foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h)
@@ -60,17 +63,6 @@ foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h)
     add_dependencies (re2_st transform_${FILENAME})
 endforeach ()

-file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/util)
-foreach (FILENAME mutex.h)
-    add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
-        COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/util/${FILENAME}"
-            -DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
-            -P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
-        COMMENT "Creating ${FILENAME} for re2_st library.")
-    add_custom_target (transform_${FILENAME} DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}")
-    add_dependencies (re2_st transform_${FILENAME})
-endforeach ()
-
 # NOTE: you should not change name of library here, since it is used to generate required header (see above)
 add_library(ch_contrib::re2 ALIAS re2)
 add_library(ch_contrib::re2_st ALIAS re2_st)
@@ -120,11 +120,12 @@
     "docker/test/base": {
         "name": "clickhouse/test-base",
         "dependent": [
-            "docker/test/stateless",
-            "docker/test/integration/base",
             "docker/test/fuzzer",
+            "docker/test/integration/base",
             "docker/test/keeper-jepsen",
-            "docker/test/server-jepsen"
+            "docker/test/server-jepsen",
+            "docker/test/sqllogic",
+            "docker/test/stateless"
         ]
     },
     "docker/test/integration/kerberized_hadoop": {
@@ -32,7 +32,7 @@ RUN arch=${TARGETARCH:-amd64} \
     esac

 ARG REPOSITORY="https://s3.amazonaws.com/clickhouse-builds/22.4/31c367d3cd3aefd316778601ff6565119fe36682/package_release"
-ARG VERSION="23.5.3.24"
+ARG VERSION="23.6.1.1524"
 ARG PACKAGES="clickhouse-keeper"

 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -89,7 +89,7 @@ RUN arch=${TARGETARCH:-amd64} \
     && dpkg -i /tmp/nfpm.deb \
     && rm /tmp/nfpm.deb

-ARG GO_VERSION=1.19.5
+ARG GO_VERSION=1.19.10
 # We need go for clickhouse-diagnostics
 RUN arch=${TARGETARCH:-amd64} \
     && curl -Lo /tmp/go.tgz "https://go.dev/dl/go${GO_VERSION}.linux-${arch}.tar.gz" \
@@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
 # lts / testing / prestable / etc
 ARG REPO_CHANNEL="stable"
 ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
-ARG VERSION="23.5.3.24"
+ARG VERSION="23.6.1.1524"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -1,4 +1,4 @@
-FROM ubuntu:22.04
+FROM ubuntu:20.04

 # see https://github.com/moby/moby/issues/4032#issuecomment-192327844
 ARG DEBIAN_FRONTEND=noninteractive
@@ -11,18 +11,19 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
     && apt-get update \
     && apt-get upgrade -yq \
     && apt-get install --yes --no-install-recommends \
-        apt-transport-https \
         ca-certificates \
-        dirmngr \
-        gnupg2 \
-        wget \
         locales \
         tzdata \
-    && apt-get clean
+        wget \
+    && apt-get clean \
+    && rm -rf \
+        /var/lib/apt/lists/* \
+        /var/cache/debconf \
+        /tmp/*

 ARG REPO_CHANNEL="stable"
-ARG REPOSITORY="deb https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
+ARG REPOSITORY="deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
-ARG VERSION="23.5.3.24"
+ARG VERSION="23.6.1.1524"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

 # set non-empty deb_location_url url to create a docker image
@@ -43,49 +44,68 @@ ARG single_binary_location_url=""

 ARG TARGETARCH

-RUN arch=${TARGETARCH:-amd64} \
+# install from a web location with deb packages
+RUN arch="${TARGETARCH:-amd64}" \
     && if [ -n "${deb_location_url}" ]; then \
         echo "installing from custom url with deb packages: ${deb_location_url}" \
-        rm -rf /tmp/clickhouse_debs \
+        && rm -rf /tmp/clickhouse_debs \
         && mkdir -p /tmp/clickhouse_debs \
         && for package in ${PACKAGES}; do \
            { wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_${arch}.deb" -P /tmp/clickhouse_debs || \
              wget --progress=bar:force:noscroll "${deb_location_url}/${package}_${VERSION}_all.deb" -P /tmp/clickhouse_debs ; } \
            || exit 1 \
        ; done \
-        && dpkg -i /tmp/clickhouse_debs/*.deb ; \
-    elif [ -n "${single_binary_location_url}" ]; then \
+        && dpkg -i /tmp/clickhouse_debs/*.deb \
+        && rm -rf /tmp/* ; \
+    fi
+
+# install from a single binary
+RUN if [ -n "${single_binary_location_url}" ]; then \
         echo "installing from single binary url: ${single_binary_location_url}" \
         && rm -rf /tmp/clickhouse_binary \
         && mkdir -p /tmp/clickhouse_binary \
         && wget --progress=bar:force:noscroll "${single_binary_location_url}" -O /tmp/clickhouse_binary/clickhouse \
         && chmod +x /tmp/clickhouse_binary/clickhouse \
-        && /tmp/clickhouse_binary/clickhouse install --user "clickhouse" --group "clickhouse" ; \
-    else \
-        mkdir -p /etc/apt/sources.list.d \
-        && apt-key adv --keyserver keyserver.ubuntu.com --recv 8919F6BD2B48D754 \
-        && echo ${REPOSITORY} > /etc/apt/sources.list.d/clickhouse.list \
+        && /tmp/clickhouse_binary/clickhouse install --user "clickhouse" --group "clickhouse" \
+        && rm -rf /tmp/* ; \
+    fi
+
+# A fallback to installation from ClickHouse repository
+RUN if ! clickhouse local -q "SELECT ''" > /dev/null 2>&1; then \
+        apt-get update \
+        && apt-get install --yes --no-install-recommends \
+            apt-transport-https \
+            ca-certificates \
+            dirmngr \
+            gnupg2 \
+        && mkdir -p /etc/apt/sources.list.d \
+        && GNUPGHOME=$(mktemp -d) \
+        && GNUPGHOME="$GNUPGHOME" gpg --no-default-keyring \
+            --keyring /usr/share/keyrings/clickhouse-keyring.gpg \
+            --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 8919F6BD2B48D754 \
+        && rm -r "$GNUPGHOME" \
+        && chmod +r /usr/share/keyrings/clickhouse-keyring.gpg \
+        && echo "${REPOSITORY}" > /etc/apt/sources.list.d/clickhouse.list \
         && echo "installing from repository: ${REPOSITORY}" \
         && apt-get update \
-        && apt-get --yes -o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold" upgrade \
         && for package in ${PACKAGES}; do \
             packages="${packages} ${package}=${VERSION}" \
         ; done \
         && apt-get install --allow-unauthenticated --yes --no-install-recommends ${packages} || exit 1 \
-    ; fi \
-    && clickhouse-local -q 'SELECT * FROM system.build_options' \
     && rm -rf \
         /var/lib/apt/lists/* \
         /var/cache/debconf \
         /tmp/* \
-    && mkdir -p /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client \
-    && chmod ugo+Xrw -R /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client
+    && apt-get autoremove --purge -yq libksba8 \
+    && apt-get autoremove -yq \
+    ; fi

-RUN apt-get autoremove --purge -yq libksba8 && \
-    apt-get autoremove -yq
-
+# post install
 # we need to allow "others" access to clickhouse folder, because docker container
 # can be started with arbitrary uid (openshift usecase)
+RUN clickhouse-local -q 'SELECT * FROM system.build_options' \
+    && mkdir -p /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client \
+    && chmod ugo+Xrw -R /var/lib/clickhouse /var/log/clickhouse-server /etc/clickhouse-server /etc/clickhouse-client

 RUN locale-gen en_US.UTF-8
 ENV LANG en_US.UTF-8
@@ -20,7 +20,6 @@ For more information and documentation see https://clickhouse.com/.

 - The amd64 image requires support for [SSE3 instructions](https://en.wikipedia.org/wiki/SSE3). Virtually all x86 CPUs after 2005 support SSE3.
 - The arm64 image requires support for the [ARMv8.2-A architecture](https://en.wikipedia.org/wiki/AArch64#ARMv8.2-A). Most ARM CPUs after 2017 support ARMv8.2-A. A notable exception is Raspberry Pi 4 from 2019 whose CPU only supports ARMv8.0-A.
-- Since the Clickhouse 23.3 Ubuntu image started using `ubuntu:22.04` as its base image, it requires docker version >= `20.10.10`, or use `docker run -- privileged` instead. Alternatively, try the Clickhouse Alpine image.

 ## How to use this image

|
@ -9,6 +9,7 @@ RUN apt-get update \
|
|||||||
expect \
|
expect \
|
||||||
file \
|
file \
|
||||||
lsof \
|
lsof \
|
||||||
|
odbcinst \
|
||||||
psmisc \
|
psmisc \
|
||||||
python3 \
|
python3 \
|
||||||
python3-lxml \
|
python3-lxml \
|
||||||
|
@@ -80,7 +80,7 @@ function start_server

 function clone_root
 {
-    git config --global --add safe.directory "$FASTTEST_SOURCE"
+    [ "$UID" -eq 0 ] && git config --global --add safe.directory "$FASTTEST_SOURCE"
     git clone --depth 1 https://github.com/ClickHouse/ClickHouse.git -- "$FASTTEST_SOURCE" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/clone_log.txt"

     (
@@ -151,7 +151,7 @@ function clone_submodules
     )

     git submodule sync
-    git submodule update --jobs=16 --depth 1 --init "${SUBMODULES_TO_UPDATE[@]}"
+    git submodule update --jobs=16 --depth 1 --single-branch --init "${SUBMODULES_TO_UPDATE[@]}"
     git submodule foreach git reset --hard
     git submodule foreach git checkout @ -f
     git submodule foreach git clean -xfd
@@ -202,10 +202,11 @@ function build
         | ts '%Y-%m-%d %H:%M:%S' \
         | tee "$FASTTEST_OUTPUT/test_result.txt"
     if [ "$COPY_CLICKHOUSE_BINARY_TO_OUTPUT" -eq "1" ]; then
-        cp programs/clickhouse "$FASTTEST_OUTPUT/clickhouse"
+        mkdir -p "$FASTTEST_OUTPUT/binaries/"
+        cp programs/clickhouse "$FASTTEST_OUTPUT/binaries/clickhouse"

-        strip programs/clickhouse -o "$FASTTEST_OUTPUT/clickhouse-stripped"
-        zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
+        strip programs/clickhouse -o programs/clickhouse-stripped
+        zstd --threads=0 programs/clickhouse-stripped -o "$FASTTEST_OUTPUT/binaries/clickhouse-stripped.zst"
     fi
     ccache_status
     ccache --evict-older-than 1d ||:
@@ -46,12 +46,13 @@ RUN arch=${TARGETARCH:-amd64} \
         arm64) rarch=aarch64 ;; \
     esac \
     && cd /tmp \
-    && curl -o mysql-odbc.rpm "https://cdn.mysql.com/archives/mysql-connector-odbc-8.0/mysql-connector-odbc-8.0.27-1.el8.${rarch}.rpm" \
+    && curl -o mysql-odbc.rpm "https://cdn.mysql.com/archives/mysql-connector-odbc-8.0/mysql-connector-odbc-8.0.32-1.el9.${rarch}.rpm" \
     && rpm2archive mysql-odbc.rpm \
     && tar xf mysql-odbc.rpm.tgz -C / ./usr/lib64/ \
-    && LINK_DIR=$(dpkg -L libodbc1 | rg '^/usr/lib/.*-linux-gnu/odbc$') \
-    && ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR" \
-    && ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR"/libmyodbc.so
+    && rm mysql-odbc.rpm mysql-odbc.rpm.tgz \
+    && ODBC_DIR=$(dpkg -L odbc-postgresql | rg '^/usr/lib/.*-linux-gnu/odbc$') \
+    && ln -s /usr/lib64/libmyodbc8a.so "$ODBC_DIR" \
+    && ln -s /usr/lib64/libmyodbc8a.so "$ODBC_DIR"/libmyodbc.so

 # Unfortunately this is required for a single test for conversion data from zookeeper to clickhouse-keeper.
 # ZooKeeper is not started by default, but consumes some space in containers.
@@ -2,4 +2,7 @@
 # Helper docker container to run iptables without sudo

 FROM alpine
-RUN apk add -U iproute2
+RUN apk add --no-cache -U iproute2 \
+    && for bin in iptables iptables-restore iptables-save; \
+    do ln -sf xtables-nft-multi "/sbin/$bin"; \
+    done
@@ -1,7 +1,7 @@
 # docker build -t clickhouse/mysql-php-client .
 # MySQL PHP client docker container

-FROM php:8.0.18-cli
+FROM php:8-cli-alpine

 COPY ./client.crt client.crt
 COPY ./client.key client.key
@@ -1,5 +1,5 @@
 # docker build -t clickhouse/integration-tests-runner .
-FROM ubuntu:20.04
+FROM ubuntu:22.04

 # ARG for quick switch to a given ubuntu mirror
 ARG apt_archive="http://archive.ubuntu.com"
@@ -56,17 +56,19 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \
         /var/lib/apt/lists/* \
         /var/cache/debconf \
         /tmp/* \
-    && apt-get clean
-
-RUN dockerd --version; docker --version
+    && apt-get clean \
+    && dockerd --version; docker --version

 RUN python3 -m pip install --no-cache-dir \
     PyMySQL \
-    aerospike==4.0.0 \
-    avro==1.10.2 \
+    aerospike==11.1.0 \
     asyncio \
+    avro==1.10.2 \
+    azure-storage-blob \
     cassandra-driver \
-    confluent-kafka==1.5.0 \
+    confluent-kafka==1.9.2 \
+    delta-spark==2.3.0 \
     dict2xml \
     dicttoxml \
     docker \
@@ -76,40 +78,38 @@ RUN python3 -m pip install --no-cache-dir \
     kafka-python \
     kazoo \
     lz4 \
+    meilisearch==0.18.3 \
     minio \
     nats-py \
     protobuf \
-    psycopg2-binary==2.8.6 \
+    psycopg2-binary==2.9.6 \
+    pyhdfs \
     pymongo==3.11.0 \
+    pyspark==3.3.2 \
     pytest \
     pytest-order==1.0.0 \
-    pytest-timeout \
     pytest-random \
-    pytest-xdist \
     pytest-repeat \
+    pytest-timeout \
+    pytest-xdist \
     pytz \
     redis \
-    tzlocal==2.1 \
-    urllib3 \
     requests-kerberos \
-    pyspark==3.3.2 \
-    delta-spark==2.2.0 \
-    pyhdfs \
-    azure-storage-blob \
-    meilisearch==0.18.3
+    tzlocal==2.1 \
+    urllib3

-COPY modprobe.sh /usr/local/bin/modprobe
-COPY dockerd-entrypoint.sh /usr/local/bin/
-COPY compose/ /compose/
-COPY misc/ /misc/
-
+# Hudi supports only spark 3.3.*, not 3.4
 RUN curl -fsSL -O https://dlcdn.apache.org/spark/spark-3.3.2/spark-3.3.2-bin-hadoop3.tgz \
     && tar xzvf spark-3.3.2-bin-hadoop3.tgz -C / \
     && rm spark-3.3.2-bin-hadoop3.tgz

 # download spark and packages
 # if you change packages, don't forget to update them in tests/integration/helpers/cluster.py
-RUN echo ":quit" | /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,io.delta:delta-core_2.12:2.2.0,org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" > /dev/null
+RUN packages="org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,\
+io.delta:delta-core_2.12:2.3.0,\
+org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" \
+    && /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "$packages" > /dev/null \
+    && find /root/.ivy2/ -name '*.jar' -exec ln -sf {} /spark-3.3.2-bin-hadoop3/jars/ \;

 RUN set -x \
     && addgroup --system dockremap \
@@ -118,6 +118,12 @@ RUN set -x \
     && echo 'dockremap:165536:65536' >> /etc/subuid \
     && echo 'dockremap:165536:65536' >> /etc/subgid

+COPY modprobe.sh /usr/local/bin/modprobe
+COPY dockerd-entrypoint.sh /usr/local/bin/
+COPY compose/ /compose/
+COPY misc/ /misc/
+
+
 # Same options as in test/base/Dockerfile
 # (in case you need to override them in tests)
 ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
|
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
|
||||||
|
@ -12,6 +12,17 @@ echo '{
|
|||||||
"registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"]
|
"registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"]
|
||||||
}' | dd of=/etc/docker/daemon.json 2>/dev/null
|
}' | dd of=/etc/docker/daemon.json 2>/dev/null
|
||||||
|
|
||||||
|
if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
|
||||||
|
# move the processes from the root group to the /init group,
|
||||||
|
# otherwise writing subtree_control fails with EBUSY.
|
||||||
|
# An error during moving non-existent process (i.e., "cat") is ignored.
|
||||||
|
mkdir -p /sys/fs/cgroup/init
|
||||||
|
xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || :
|
||||||
|
# enable controllers
|
||||||
|
sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \
|
||||||
|
> /sys/fs/cgroup/cgroup.subtree_control
|
||||||
|
fi
|
||||||
|
|
||||||
# In case of test hung it is convenient to use pytest --pdb to debug it,
|
# In case of test hung it is convenient to use pytest --pdb to debug it,
|
||||||
# and on hung you can simply press Ctrl-C and it will spawn a python pdb,
|
# and on hung you can simply press Ctrl-C and it will spawn a python pdb,
|
||||||
# but on SIGINT dockerd will exit, so ignore it to preserve the daemon.
|
# but on SIGINT dockerd will exit, so ignore it to preserve the daemon.
|
||||||
@ -52,6 +63,8 @@ export CLICKHOUSE_TESTS_BASE_CONFIG_DIR=/clickhouse-config
|
|||||||
export CLICKHOUSE_ODBC_BRIDGE_BINARY_PATH=/clickhouse-odbc-bridge
|
export CLICKHOUSE_ODBC_BRIDGE_BINARY_PATH=/clickhouse-odbc-bridge
|
||||||
export CLICKHOUSE_LIBRARY_BRIDGE_BINARY_PATH=/clickhouse-library-bridge
|
export CLICKHOUSE_LIBRARY_BRIDGE_BINARY_PATH=/clickhouse-library-bridge
|
||||||
|
|
||||||
|
export DOCKER_BASE_TAG=${DOCKER_BASE_TAG:=latest}
|
||||||
|
export DOCKER_HELPER_TAG=${DOCKER_HELPER_TAG:=latest}
|
||||||
export DOCKER_MYSQL_GOLANG_CLIENT_TAG=${DOCKER_MYSQL_GOLANG_CLIENT_TAG:=latest}
|
export DOCKER_MYSQL_GOLANG_CLIENT_TAG=${DOCKER_MYSQL_GOLANG_CLIENT_TAG:=latest}
|
||||||
export DOCKER_DOTNET_CLIENT_TAG=${DOCKER_DOTNET_CLIENT_TAG:=latest}
|
export DOCKER_DOTNET_CLIENT_TAG=${DOCKER_DOTNET_CLIENT_TAG:=latest}
|
||||||
export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
|
export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
|
||||||
|
@ -14,6 +14,13 @@ LEFT_SERVER_PORT=9001
|
|||||||
# patched version
|
# patched version
|
||||||
RIGHT_SERVER_PORT=9002
|
RIGHT_SERVER_PORT=9002
|
||||||
|
|
||||||
|
# abort_conf -- abort if some options is not recognized
|
||||||
|
# abort -- abort if something is not right in the env (i.e. per-cpu arenas does not work)
|
||||||
|
# narenas -- set them explicitly to avoid disabling per-cpu arena in env
|
||||||
|
# that returns different number of CPUs for some of the following
|
||||||
|
# _SC_NPROCESSORS_ONLN/_SC_NPROCESSORS_CONF/sched_getaffinity
|
||||||
|
export MALLOC_CONF="abort_conf:true,abort:true,narenas:$(nproc --all)"
|
||||||
|
|
||||||
function wait_for_server # port, pid
|
function wait_for_server # port, pid
|
||||||
{
|
{
|
||||||
for _ in {1..60}
|
for _ in {1..60}
|
||||||
@ -109,10 +116,6 @@ function restart
|
|||||||
while pkill -f clickhouse-serv ; do echo . ; sleep 1 ; done
|
while pkill -f clickhouse-serv ; do echo . ; sleep 1 ; done
|
||||||
echo all killed
|
echo all killed
|
||||||
|
|
||||||
# Change the jemalloc settings here.
|
|
||||||
# https://github.com/jemalloc/jemalloc/wiki/Getting-Started
|
|
||||||
export MALLOC_CONF="confirm_conf:true"
|
|
||||||
|
|
||||||
set -m # Spawn servers in their own process groups
|
set -m # Spawn servers in their own process groups
|
||||||
|
|
||||||
local left_server_opts=(
|
local left_server_opts=(
|
||||||
@ -147,8 +150,6 @@ function restart
|
|||||||
|
|
||||||
set +m
|
set +m
|
||||||
|
|
||||||
unset MALLOC_CONF
|
|
||||||
|
|
||||||
wait_for_server $LEFT_SERVER_PORT $left_pid
|
wait_for_server $LEFT_SERVER_PORT $left_pid
|
||||||
echo left ok
|
echo left ok
|
||||||
|
|
||||||
|
@ -13,6 +13,7 @@ RUN apt-get update --yes \
|
|||||||
sqlite3 \
|
sqlite3 \
|
||||||
unixodbc \
|
unixodbc \
|
||||||
unixodbc-dev \
|
unixodbc-dev \
|
||||||
|
odbcinst \
|
||||||
sudo \
|
sudo \
|
||||||
&& apt-get clean
|
&& apt-get clean
|
||||||
|
|
||||||
|
@ -16,8 +16,9 @@ COPY s3downloader /s3downloader
|
|||||||
ENV S3_URL="https://clickhouse-datasets.s3.amazonaws.com"
|
ENV S3_URL="https://clickhouse-datasets.s3.amazonaws.com"
|
||||||
ENV DATASETS="hits visits"
|
ENV DATASETS="hits visits"
|
||||||
|
|
||||||
RUN npm install -g azurite
|
# The following is already done in clickhouse/stateless-test
|
||||||
RUN npm install tslib
|
# RUN npm install -g azurite
|
||||||
|
# RUN npm install tslib
|
||||||
|
|
||||||
COPY run.sh /
|
COPY run.sh /
|
||||||
CMD ["/bin/bash", "/run.sh"]
|
CMD ["/bin/bash", "/run.sh"]
|
||||||
|
@ -20,6 +20,7 @@ RUN apt-get update -y \
|
|||||||
netcat-openbsd \
|
netcat-openbsd \
|
||||||
nodejs \
|
nodejs \
|
||||||
npm \
|
npm \
|
||||||
|
odbcinst \
|
||||||
openjdk-11-jre-headless \
|
openjdk-11-jre-headless \
|
||||||
openssl \
|
openssl \
|
||||||
postgresql-client \
|
postgresql-client \
|
||||||
@ -71,7 +72,7 @@ RUN arch=${TARGETARCH:-amd64} \
|
|||||||
&& chmod +x ./mc ./minio
|
&& chmod +x ./mc ./minio
|
||||||
|
|
||||||
|
|
||||||
RUN wget 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
|
RUN wget --no-verbose 'https://dlcdn.apache.org/hadoop/common/hadoop-3.3.1/hadoop-3.3.1.tar.gz' \
|
||||||
&& tar -xvf hadoop-3.3.1.tar.gz \
|
&& tar -xvf hadoop-3.3.1.tar.gz \
|
||||||
&& rm -rf hadoop-3.3.1.tar.gz
|
&& rm -rf hadoop-3.3.1.tar.gz
|
||||||
|
|
||||||
@ -79,8 +80,8 @@ ENV MINIO_ROOT_USER="clickhouse"
|
|||||||
ENV MINIO_ROOT_PASSWORD="clickhouse"
|
ENV MINIO_ROOT_PASSWORD="clickhouse"
|
||||||
ENV EXPORT_S3_STORAGE_POLICIES=1
|
ENV EXPORT_S3_STORAGE_POLICIES=1
|
||||||
|
|
||||||
RUN npm install -g azurite
|
RUN npm install -g azurite \
|
||||||
RUN npm install tslib
|
&& npm install -g tslib
|
||||||
|
|
||||||
COPY run.sh /
|
COPY run.sh /
|
||||||
COPY setup_minio.sh /
|
COPY setup_minio.sh /
|
||||||
|
@ -18,6 +18,9 @@ ln -s /usr/share/clickhouse-test/clickhouse-test /usr/bin/clickhouse-test
|
|||||||
# shellcheck disable=SC1091
|
# shellcheck disable=SC1091
|
||||||
source /usr/share/clickhouse-test/ci/attach_gdb.lib || true # FIXME: to not break old builds, clean on 2023-09-01
|
source /usr/share/clickhouse-test/ci/attach_gdb.lib || true # FIXME: to not break old builds, clean on 2023-09-01
|
||||||
|
|
||||||
|
# shellcheck disable=SC1091
|
||||||
|
source /usr/share/clickhouse-test/ci/utils.lib || true # FIXME: to not break old builds, clean on 2023-09-01
|
||||||
|
|
||||||
# install test configs
|
# install test configs
|
||||||
/usr/share/clickhouse-test/config/install.sh
|
/usr/share/clickhouse-test/config/install.sh
|
||||||
|
|
||||||
@ -90,6 +93,22 @@ sleep 5
|
|||||||
|
|
||||||
attach_gdb_to_clickhouse || true # FIXME: to not break old builds, clean on 2023-09-01
|
attach_gdb_to_clickhouse || true # FIXME: to not break old builds, clean on 2023-09-01
|
||||||
|
|
||||||
|
function fn_exists() {
|
||||||
|
declare -F "$1" > /dev/null;
|
||||||
|
}
|
||||||
|
|
||||||
|
# FIXME: to not break old builds, clean on 2023-09-01
|
||||||
|
function try_run_with_retry() {
|
||||||
|
local total_retries="$1"
|
||||||
|
shift
|
||||||
|
|
||||||
|
if fn_exists run_with_retry; then
|
||||||
|
run_with_retry "$total_retries" "$@"
|
||||||
|
else
|
||||||
|
"$@"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
function run_tests()
|
function run_tests()
|
||||||
{
|
{
|
||||||
set -x
|
set -x
|
||||||
@ -137,8 +156,7 @@ function run_tests()
|
|||||||
|
|
||||||
ADDITIONAL_OPTIONS+=('--report-logs-stats')
|
ADDITIONAL_OPTIONS+=('--report-logs-stats')
|
||||||
|
|
||||||
clickhouse-test "00001_select_1" > /dev/null ||:
|
try_run_with_retry 10 clickhouse-client -q "insert into system.zookeeper (name, path, value) values ('auxiliary_zookeeper2', '/test/chroot/', '')"
|
||||||
clickhouse-client -q "insert into system.zookeeper (name, path, value) values ('auxiliary_zookeeper2', '/test/chroot/', '')" ||:
|
|
||||||
|
|
||||||
set +e
|
set +e
|
||||||
clickhouse-test --testname --shard --zookeeper --check-zookeeper-session --hung-check --print-time \
|
clickhouse-test --testname --shard --zookeeper --check-zookeeper-session --hung-check --print-time \
|
||||||
|
@ -18,7 +18,7 @@ RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
|
|||||||
python3-pip \
|
python3-pip \
|
||||||
shellcheck \
|
shellcheck \
|
||||||
yamllint \
|
yamllint \
|
||||||
&& pip3 install black==23.1.0 boto3 codespell==2.2.1 dohq-artifactory mypy PyGithub unidiff pylint==2.6.2 \
|
&& pip3 install black==23.1.0 boto3 codespell==2.2.1 mypy==1.3.0 PyGithub unidiff pylint==2.6.2 \
|
||||||
&& apt-get clean \
|
&& apt-get clean \
|
||||||
&& rm -rf /root/.cache/pip
|
&& rm -rf /root/.cache/pip
|
||||||
|
|
||||||
|
@ -189,6 +189,7 @@ rg -Fav -e "Code: 236. DB::Exception: Cancelled merging parts" \
|
|||||||
-e "Authentication failed" \
|
-e "Authentication failed" \
|
||||||
-e "Cannot flush" \
|
-e "Cannot flush" \
|
||||||
-e "Container already exists" \
|
-e "Container already exists" \
|
||||||
|
-e "doesn't have metadata version on disk" \
|
||||||
clickhouse-server.upgrade.log \
|
clickhouse-server.upgrade.log \
|
||||||
| grep -av -e "_repl_01111_.*Mapping for table with UUID" \
|
| grep -av -e "_repl_01111_.*Mapping for table with UUID" \
|
||||||
| zgrep -Fa "<Error>" > /test_output/upgrade_error_messages.txt \
|
| zgrep -Fa "<Error>" > /test_output/upgrade_error_messages.txt \
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# docker build -t clickhouse/test-util .
|
# docker build -t clickhouse/test-util .
|
||||||
FROM ubuntu:20.04
|
FROM ubuntu:22.04
|
||||||
|
|
||||||
# ARG for quick switch to a given ubuntu mirror
|
# ARG for quick switch to a given ubuntu mirror
|
||||||
ARG apt_archive="http://archive.ubuntu.com"
|
ARG apt_archive="http://archive.ubuntu.com"
|
||||||
|
@ -86,7 +86,7 @@ def process_test_log(log_path, broken_tests):
|
|||||||
test_name,
|
test_name,
|
||||||
"NOT_FAILED",
|
"NOT_FAILED",
|
||||||
test_time,
|
test_time,
|
||||||
["This test passed. Update broken_tests.txt.\n"],
|
["This test passed. Update analyzer_tech_debt.txt.\n"],
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
@ -205,7 +205,7 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument("--in-results-dir", default="/test_output/")
|
parser.add_argument("--in-results-dir", default="/test_output/")
|
||||||
parser.add_argument("--out-results-file", default="/test_output/test_results.tsv")
|
parser.add_argument("--out-results-file", default="/test_output/test_results.tsv")
|
||||||
parser.add_argument("--out-status-file", default="/test_output/check_status.tsv")
|
parser.add_argument("--out-status-file", default="/test_output/check_status.tsv")
|
||||||
parser.add_argument("--broken-tests", default="/broken_tests.txt")
|
parser.add_argument("--broken-tests", default="/analyzer_tech_debt.txt")
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
broken_tests = list()
|
broken_tests = list()
|
||||||
|
29
docs/_description_templates/template-data-type.md
Normal file
29
docs/_description_templates/template-data-type.md
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
---
|
||||||
|
toc_priority:
|
||||||
|
toc_title:
|
||||||
|
---
|
||||||
|
|
||||||
|
# data_type_name {#data_type-name}
|
||||||
|
|
||||||
|
Description.
|
||||||
|
|
||||||
|
**Parameters** (Optional)
|
||||||
|
|
||||||
|
- `x` — Description. [Type name](relative/path/to/type/dscr.md#type).
|
||||||
|
- `y` — Description. [Type name](relative/path/to/type/dscr.md#type).
|
||||||
|
|
||||||
|
**Examples**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
## Additional Info {#additional-info} (Optional)
|
||||||
|
|
||||||
|
The name of an additional section can be any, for example, **Usage**.
|
||||||
|
|
||||||
|
**See Also** (Optional)
|
||||||
|
|
||||||
|
- [link](#)
|
||||||
|
|
||||||
|
[Original article](https://clickhouse.com/docs/en/data-types/<data-type-name>/) <!--hide-->
|
63
docs/_description_templates/template-engine.md
Normal file
63
docs/_description_templates/template-engine.md
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
# EngineName {#enginename}
|
||||||
|
|
||||||
|
- What the Database/Table engine does.
|
||||||
|
- Relations with other engines if they exist.
|
||||||
|
|
||||||
|
## Creating a Database {#creating-a-database}
|
||||||
|
``` sql
|
||||||
|
CREATE DATABASE ...
|
||||||
|
```
|
||||||
|
or
|
||||||
|
|
||||||
|
## Creating a Table {#creating-a-table}
|
||||||
|
``` sql
|
||||||
|
CREATE TABLE ...
|
||||||
|
```
|
||||||
|
|
||||||
|
**Engine Parameters**
|
||||||
|
|
||||||
|
**Query Clauses** (for Table engines only)
|
||||||
|
|
||||||
|
## Virtual columns {#virtual-columns} (for Table engines only)
|
||||||
|
|
||||||
|
List and virtual columns with description, if they exist.
|
||||||
|
|
||||||
|
## Data Types Support {#data_types-support} (for Database engines only)
|
||||||
|
|
||||||
|
| EngineName | ClickHouse |
|
||||||
|
|-----------------------|------------------------------------|
|
||||||
|
| NativeDataTypeName | [ClickHouseDataTypeName](link#) |
|
||||||
|
|
||||||
|
|
||||||
|
## Specifics and recommendations {#specifics-and-recommendations}
|
||||||
|
|
||||||
|
Algorithms
|
||||||
|
Specifics of read and write processes
|
||||||
|
Examples of tasks
|
||||||
|
Recommendations for usage
|
||||||
|
Specifics of data storage
|
||||||
|
|
||||||
|
## Usage Example {#usage-example}
|
||||||
|
|
||||||
|
The example must show usage and use cases. The following text contains the recommended parts of this section.
|
||||||
|
|
||||||
|
Input table:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
```
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
```
|
||||||
|
|
||||||
|
Follow up with any text to clarify the example.
|
||||||
|
|
||||||
|
**See Also**
|
||||||
|
|
||||||
|
- [link](#)
|
51
docs/_description_templates/template-function.md
Normal file
51
docs/_description_templates/template-function.md
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
## functionName {#functionname-in-lower-case}
|
||||||
|
|
||||||
|
Short description.
|
||||||
|
|
||||||
|
**Syntax** (without SELECT)
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
<function syntax>
|
||||||
|
```
|
||||||
|
|
||||||
|
Alias: `<alias name>`. (Optional)
|
||||||
|
|
||||||
|
More text (Optional).
|
||||||
|
|
||||||
|
**Arguments** (Optional)
|
||||||
|
|
||||||
|
- `x` — Description. Optional (only for optional arguments). Possible values: <values list>. Default value: <value>. [Type name](relative/path/to/type/dscr.md#type).
|
||||||
|
- `y` — Description. Optional (only for optional arguments). Possible values: <values list>.Default value: <value>. [Type name](relative/path/to/type/dscr.md#type).
|
||||||
|
|
||||||
|
**Parameters** (Optional, only for parametric aggregate functions)
|
||||||
|
|
||||||
|
- `z` — Description. Optional (only for optional parameters). Possible values: <values list>. Default value: <value>. [Type name](relative/path/to/type/dscr.md#type).
|
||||||
|
|
||||||
|
**Returned value(s)**
|
||||||
|
|
||||||
|
- Returned values list.
|
||||||
|
|
||||||
|
Type: [Type name](relative/path/to/type/dscr.md#type).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
The example must show usage and/or a use cases. The following text contains recommended parts of an example.
|
||||||
|
|
||||||
|
Input table (Optional):
|
||||||
|
|
||||||
|
``` text
|
||||||
|
```
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
```
|
||||||
|
|
||||||
|
**See Also** (Optional)
|
||||||
|
|
||||||
|
- [link](#)
|
33
docs/_description_templates/template-server-setting.md
Normal file
33
docs/_description_templates/template-server-setting.md
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
## server_setting_name {#server_setting_name}
|
||||||
|
|
||||||
|
Description.
|
||||||
|
|
||||||
|
Describe what is configured in this section of settings.
|
||||||
|
|
||||||
|
Possible value: ...
|
||||||
|
|
||||||
|
Default value: ...
|
||||||
|
|
||||||
|
**Settings** (Optional)
|
||||||
|
|
||||||
|
If the section contains several settings, list them here. Specify possible values and default values:
|
||||||
|
|
||||||
|
- setting_1 — Description.
|
||||||
|
- setting_2 — Description.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<server_setting_name>
|
||||||
|
<setting_1> ... </setting_1>
|
||||||
|
<setting_2> ... </setting_2>
|
||||||
|
</server_setting_name>
|
||||||
|
```
|
||||||
|
|
||||||
|
**Additional Info** (Optional)
|
||||||
|
|
||||||
|
The name of an additional section can be any, for example, **Usage**.
|
||||||
|
|
||||||
|
**See Also** (Optional)
|
||||||
|
|
||||||
|
- [link](#)
|
27
docs/_description_templates/template-setting.md
Normal file
27
docs/_description_templates/template-setting.md
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
## setting_name {#setting_name}
|
||||||
|
|
||||||
|
Description.
|
||||||
|
|
||||||
|
For the switch setting, use the typical phrase: “Enables or disables something …”.
|
||||||
|
|
||||||
|
Possible values:
|
||||||
|
|
||||||
|
*For switcher setting:*
|
||||||
|
|
||||||
|
- 0 — Disabled.
|
||||||
|
- 1 — Enabled.
|
||||||
|
|
||||||
|
*For another setting (typical phrases):*
|
||||||
|
|
||||||
|
- Positive integer.
|
||||||
|
- 0 — Disabled or unlimited or something else.
|
||||||
|
|
||||||
|
Default value: `value`.
|
||||||
|
|
||||||
|
**Additional Info** (Optional)
|
||||||
|
|
||||||
|
The name of an additional section can be any, for example, **Usage**.
|
||||||
|
|
||||||
|
**See Also** (Optional)
|
||||||
|
|
||||||
|
- [link](#)
|
24
docs/_description_templates/template-statement.md
Normal file
24
docs/_description_templates/template-statement.md
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
# Statement name (for example, SHOW USER) {#statement-name-in-lower-case}
|
||||||
|
|
||||||
|
Brief description of what the statement does.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
Syntax of the statement.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Other necessary sections of the description (Optional) {#anchor}
|
||||||
|
|
||||||
|
Examples of descriptions with a complicated structure:
|
||||||
|
|
||||||
|
- https://clickhouse.com/docs/en/sql-reference/statements/grant/
|
||||||
|
- https://clickhouse.com/docs/en/sql-reference/statements/revoke/
|
||||||
|
- https://clickhouse.com/docs/en/sql-reference/statements/select/join/
|
||||||
|
|
||||||
|
|
||||||
|
**See Also** (Optional)
|
||||||
|
|
||||||
|
Links to related topics as a list.
|
||||||
|
|
||||||
|
- [link](#)
|
25
docs/_description_templates/template-system-table.md
Normal file
25
docs/_description_templates/template-system-table.md
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
# system.table_name {#system-tables_table-name}
|
||||||
|
|
||||||
|
Description.
|
||||||
|
|
||||||
|
Columns:
|
||||||
|
|
||||||
|
- `column_name` ([data_type_name](path/to/data_type.md)) — Description.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT * FROM system.table_name
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
Some output. It shouldn't be too long.
|
||||||
|
```
|
||||||
|
|
||||||
|
**See Also**
|
||||||
|
|
||||||
|
- [Article name](path/to/article_name.md) — Some words about referenced information.
|
19
docs/changelogs/v23.3.6.7-lts.md
Normal file
19
docs/changelogs/v23.3.6.7-lts.md
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2023
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2023 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v23.3.6.7-lts (7e3f0a271b7) FIXME as compared to v23.3.5.9-lts (f5fbc2fd2b3)
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Backported in [#51240](https://github.com/ClickHouse/ClickHouse/issues/51240): Improve the progress bar for file/s3/hdfs/url table functions by using chunk size from source data and using incremental total size counting in each thread. Fix the progress bar for *Cluster functions. This closes [#47250](https://github.com/ClickHouse/ClickHouse/issues/47250). [#51088](https://github.com/ClickHouse/ClickHouse/pull/51088) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#51529](https://github.com/ClickHouse/ClickHouse/issues/51529): Split huge `RUN` in Dockerfile into smaller conditional. Install the necessary tools on demand in the same `RUN` layer, and remove them after that. Upgrade the OS only once at the beginning. Use a modern way to check the signed repository. Downgrade the base repo to ubuntu:20.04 to address the issues on older docker versions. Upgrade golang version to address golang vulnerabilities. [#51504](https://github.com/ClickHouse/ClickHouse/pull/51504) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in an official stable release)
|
||||||
|
|
||||||
|
* Fix type of LDAP server params hash in cache entry [#50865](https://github.com/ClickHouse/ClickHouse/pull/50865) ([Julian Maicher](https://github.com/jmaicher)).
|
||||||
|
|
16
docs/changelogs/v23.3.7.5-lts.md
Normal file
16
docs/changelogs/v23.3.7.5-lts.md
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2023
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2023 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v23.3.7.5-lts (bc683c11c92) FIXME as compared to v23.3.6.7-lts (7e3f0a271b7)
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#51568](https://github.com/ClickHouse/ClickHouse/issues/51568): This a follow-up for [#51504](https://github.com/ClickHouse/ClickHouse/issues/51504), the cleanup was lost during refactoring. [#51564](https://github.com/ClickHouse/ClickHouse/pull/51564) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in an official stable release)
|
||||||
|
|
||||||
|
* Fix fuzzer failure in ActionsDAG [#51301](https://github.com/ClickHouse/ClickHouse/pull/51301) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
27
docs/changelogs/v23.4.5.22-stable.md
Normal file
27
docs/changelogs/v23.4.5.22-stable.md
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2023
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2023 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v23.4.5.22-stable (0ced5d6a8da) FIXME as compared to v23.4.4.16-stable (747ba4fc6a0)
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#51530](https://github.com/ClickHouse/ClickHouse/issues/51530): Split huge `RUN` in Dockerfile into smaller conditional. Install the necessary tools on demand in the same `RUN` layer, and remove them after that. Upgrade the OS only once at the beginning. Use a modern way to check the signed repository. Downgrade the base repo to ubuntu:20.04 to address the issues on older docker versions. Upgrade golang version to address golang vulnerabilities. [#51504](https://github.com/ClickHouse/ClickHouse/pull/51504) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Backported in [#51570](https://github.com/ClickHouse/ClickHouse/issues/51570): This a follow-up for [#51504](https://github.com/ClickHouse/ClickHouse/issues/51504), the cleanup was lost during refactoring. [#51564](https://github.com/ClickHouse/ClickHouse/pull/51564) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in an official stable release)
|
||||||
|
|
||||||
|
* Fix broken index analysis when binary operator contains a null constant argument [#50177](https://github.com/ClickHouse/ClickHouse/pull/50177) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Fix reconnecting of HTTPS session when target host IP was changed [#50240](https://github.com/ClickHouse/ClickHouse/pull/50240) ([Aleksei Filatov](https://github.com/aalexfvk)).
|
||||||
|
* Fix incorrect constant folding [#50536](https://github.com/ClickHouse/ClickHouse/pull/50536) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Fix type of LDAP server params hash in cache entry [#50865](https://github.com/ClickHouse/ClickHouse/pull/50865) ([Julian Maicher](https://github.com/jmaicher)).
|
||||||
|
* Fallback to parsing big integer from String instead of exception in Parquet format [#50873](https://github.com/ClickHouse/ClickHouse/pull/50873) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Do not apply projection if read-in-order was enabled. [#50923](https://github.com/ClickHouse/ClickHouse/pull/50923) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
* Fix fuzzer failure in ActionsDAG [#51301](https://github.com/ClickHouse/ClickHouse/pull/51301) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Increase max array size in group bitmap [#50620](https://github.com/ClickHouse/ClickHouse/pull/50620) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
|
31
docs/changelogs/v23.5.4.25-stable.md
Normal file
31
docs/changelogs/v23.5.4.25-stable.md
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2023
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2023 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v23.5.4.25-stable (190f962abcf) FIXME as compared to v23.5.3.24-stable (76f54616d3b)
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Backported in [#51235](https://github.com/ClickHouse/ClickHouse/issues/51235): Improve the progress bar for file/s3/hdfs/url table functions by using chunk size from source data and using incremental total size counting in each thread. Fix the progress bar for *Cluster functions. This closes [#47250](https://github.com/ClickHouse/ClickHouse/issues/47250). [#51088](https://github.com/ClickHouse/ClickHouse/pull/51088) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Backported in [#51255](https://github.com/ClickHouse/ClickHouse/issues/51255): Disable cache setting `do_not_evict_index_and_mark_files` (Was enabled in `23.5`). [#51222](https://github.com/ClickHouse/ClickHouse/pull/51222) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#51531](https://github.com/ClickHouse/ClickHouse/issues/51531): Split huge `RUN` in Dockerfile into smaller conditional. Install the necessary tools on demand in the same `RUN` layer, and remove them after that. Upgrade the OS only once at the beginning. Use a modern way to check the signed repository. Downgrade the base repo to ubuntu:20.04 to address the issues on older docker versions. Upgrade golang version to address golang vulnerabilities. [#51504](https://github.com/ClickHouse/ClickHouse/pull/51504) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Backported in [#51572](https://github.com/ClickHouse/ClickHouse/issues/51572): This a follow-up for [#51504](https://github.com/ClickHouse/ClickHouse/issues/51504), the cleanup was lost during refactoring. [#51564](https://github.com/ClickHouse/ClickHouse/pull/51564) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in an official stable release)
|
||||||
|
|
||||||
|
* Query Cache: Try to fix bad cast from ColumnConst to ColumnVector<char8_t> [#50704](https://github.com/ClickHouse/ClickHouse/pull/50704) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Fix type of LDAP server params hash in cache entry [#50865](https://github.com/ClickHouse/ClickHouse/pull/50865) ([Julian Maicher](https://github.com/jmaicher)).
|
||||||
|
* Fallback to parsing big integer from String instead of exception in Parquet format [#50873](https://github.com/ClickHouse/ClickHouse/pull/50873) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Do not apply projection if read-in-order was enabled. [#50923](https://github.com/ClickHouse/ClickHouse/pull/50923) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
* Fix race azure blob storage iterator [#50936](https://github.com/ClickHouse/ClickHouse/pull/50936) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
|
||||||
|
* Fix ineffective query cache for SELECTs with subqueries [#51132](https://github.com/ClickHouse/ClickHouse/pull/51132) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Fix fuzzer failure in ActionsDAG [#51301](https://github.com/ClickHouse/ClickHouse/pull/51301) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Fix ParallelReadBuffer seek [#50820](https://github.com/ClickHouse/ClickHouse/pull/50820) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||||
|
|
301
docs/changelogs/v23.6.1.1524-stable.md
Normal file
301
docs/changelogs/v23.6.1.1524-stable.md
Normal file
@ -0,0 +1,301 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2023
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2023 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v23.6.1.1524-stable (d1c7e13d088) FIXME as compared to v23.5.1.3174-stable (2fec796e73e)
|
||||||
|
|
||||||
|
#### Backward Incompatible Change
|
||||||
|
* Delete feature `do_not_evict_index_and_mark_files` in the fs cache. This feature was only making things worse. [#51253](https://github.com/ClickHouse/ClickHouse/pull/51253) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Remove ALTER support for experimental LIVE VIEW. [#51287](https://github.com/ClickHouse/ClickHouse/pull/51287) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
||||||
|
#### New Feature
|
||||||
|
* Add setting `session_timezone`, it is used as default timezone for session when not explicitly specified. [#44149](https://github.com/ClickHouse/ClickHouse/pull/44149) ([Andrey Zvonov](https://github.com/zvonand)).
|
||||||
|
* Added overlay database engine and representation of a directory as a database This commit adds 4 databases: 1. DatabaseOverlay: Implements the IDatabase interface. Allow to combine multiple databases, such as FileSystem and Memory. Internally, it stores a vector with other database pointers and proxies requests to them in turn until it is executed successfully. 2. DatabaseFilesystem: allows to read-only interact with files stored on the file system. Internally, it uses TableFunctionFile to implicitly load file when a user requests the table. Result of TableFunctionFile call cached inside to provide quick access. 3. DatabaseS3: allows to read-only interact with s3 storage. It uses TableFunctionS3 to implicitly load table from s3 4. DatabaseHDFS: allows to interact with hdfs storage. It uses TableFunctionHDFS to implicitly load table from hdfs. [#48821](https://github.com/ClickHouse/ClickHouse/pull/48821) ([alekseygolub](https://github.com/alekseygolub)).
|
||||||
|
* Add a new setting named `use_mysql_types_in_show_columns` to alter the `SHOW COLUMNS` SQL statement to display MySQL equivalent types when a client is connected via the MySQL compatibility port. [#49577](https://github.com/ClickHouse/ClickHouse/pull/49577) ([Thomas Panetti](https://github.com/tpanetti)).
|
||||||
|
* Added option `--rename_files_after_processing <pattern>`. This closes [#34207](https://github.com/ClickHouse/ClickHouse/issues/34207). [#49626](https://github.com/ClickHouse/ClickHouse/pull/49626) ([alekseygolub](https://github.com/alekseygolub)).
|
||||||
|
* 1. Add `TableFunctionRedis` 3. Add table engine Redis 4. Add `RedisCommon` which contains Redis related tools and types 5. Support `equals` and `in` filter push down into Redis. [#50150](https://github.com/ClickHouse/ClickHouse/pull/50150) ([JackyWoo](https://github.com/JackyWoo)).
|
||||||
|
* Allow to skip empty files in file/s3/url/hdfs table functions using settings `s3_skip_empty_files`, `hdfs_skip_empty_files`, `engine_file_skip_empty_files`, `engine_url_skip_empty_files`. [#50364](https://github.com/ClickHouse/ClickHouse/pull/50364) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Clickhouse-client can now be called with a connection instead of "--host", "--port", "--user" etc. [#50689](https://github.com/ClickHouse/ClickHouse/pull/50689) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
|
||||||
|
* Codec DEFLATE_QPL is now controlled via server setting "enable_deflate_qpl_codec" (default: false) instead of setting "allow_experimental_codecs". This marks QPL_DEFLATE non-experimental. [#50775](https://github.com/ClickHouse/ClickHouse/pull/50775) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
|
||||||
|
#### Performance Improvement
|
||||||
|
* Improve performance with enabled QueryProfiler using thread-local timer_id instead of global object. [#48778](https://github.com/ClickHouse/ClickHouse/pull/48778) ([Jiebin Sun](https://github.com/jiebinn)).
|
||||||
|
* Rewrite CapnProto input/output format to improve its performance. Map column names and CapnProto fields case insensitive, fix reading/writing of nested structure fields. [#49752](https://github.com/ClickHouse/ClickHouse/pull/49752) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Optimize parquet write performance for parallel threads. [#50102](https://github.com/ClickHouse/ClickHouse/pull/50102) ([Hongbin Ma](https://github.com/binmahone)).
|
||||||
|
* ### Documentation entry for user-facing changes Disable `parallelize_output_from_storages` for processing MATERIALIZED VIEWs and storages with one block only. [#50214](https://github.com/ClickHouse/ClickHouse/pull/50214) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Merge PR https://github.com/ClickHouse/ClickHouse/pull/46558 (Avoid processing already sorted data). Avoid block permutation during sort if the block is already sorted. [#50697](https://github.com/ClickHouse/ClickHouse/pull/50697) ([Maksim Kita](https://github.com/kitaisreal)).
|
||||||
|
* In the earlier PRs ([#50062](https://github.com/ClickHouse/ClickHouse/issues/50062), [#50307](https://github.com/ClickHouse/ClickHouse/issues/50307)), we used to propose an optimization pattern which transforms the predicates with toYear/toYYYYMM into its equivalent but converter-free form. This transformation could bring significant performance impact to some workloads, such as SSB. However, as issue [#50628](https://github.com/ClickHouse/ClickHouse/issues/50628) indicated, these two PRs would introduce some issues which may results in incomplete query results, and as a result, they were reverted by [#50629](https://github.com/ClickHouse/ClickHouse/issues/50629). [#50951](https://github.com/ClickHouse/ClickHouse/pull/50951) ([Zhiguo Zhou](https://github.com/ZhiguoZh)).
|
||||||
|
* Make multiple list requests to ZooKeeper in parallel to speed up reading from system.zookeeper table. [#51042](https://github.com/ClickHouse/ClickHouse/pull/51042) ([Alexander Gololobov](https://github.com/davenger)).
|
||||||
|
* Speedup initialization of DateTime lookup tables for time zones. This should reduce startup/connect time of clickhouse client especially in debug build as it is rather heavy. [#51347](https://github.com/ClickHouse/ClickHouse/pull/51347) ([Alexander Gololobov](https://github.com/davenger)).
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Allow to cast IPv6 to IPv4 address for CIDR ::ffff:0:0/96 (IPv4-mapped addresses). [#49759](https://github.com/ClickHouse/ClickHouse/pull/49759) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
|
* Update MongoDB protocol to support MongoDB 5.1 version and newer. Support for the versions with the old protocol (<3.6) is preserved. Closes [#45621](https://github.com/ClickHouse/ClickHouse/issues/45621), [#49879](https://github.com/ClickHouse/ClickHouse/issues/49879). [#50061](https://github.com/ClickHouse/ClickHouse/pull/50061) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||||
|
* Improved scheduling of merge selecting and cleanup tasks in `ReplicatedMergeTree`. The tasks will not be executed too frequently when there's nothing to merge or cleanup. Added settings `max_merge_selecting_sleep_ms`, `merge_selecting_sleep_slowdown_factor`, `max_cleanup_delay_period` and `cleanup_thread_preferred_points_per_iteration`. It should close [#31919](https://github.com/ClickHouse/ClickHouse/issues/31919). [#50107](https://github.com/ClickHouse/ClickHouse/pull/50107) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Support parallel replicas with the analyzer. [#50441](https://github.com/ClickHouse/ClickHouse/pull/50441) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
* Add setting `input_format_max_bytes_to_read_for_schema_inference` to limit the number of bytes to read in schema inference. Closes [#50577](https://github.com/ClickHouse/ClickHouse/issues/50577). [#50592](https://github.com/ClickHouse/ClickHouse/pull/50592) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Respect setting input_format_as_default in schema inference. [#50602](https://github.com/ClickHouse/ClickHouse/pull/50602) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Make filter push down through cross join. [#50605](https://github.com/ClickHouse/ClickHouse/pull/50605) ([Han Fei](https://github.com/hanfei1991)).
|
||||||
|
* Actual lz4 version is used now. [#50621](https://github.com/ClickHouse/ClickHouse/pull/50621) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
* Allow to skip trailing empty lines in CSV/TSV/CustomSeparated formats via settings `input_format_csv_skip_trailing_empty_lines`, `input_format_tsv_skip_trailing_empty_lines` and `input_format_custom_skip_trailing_empty_lines` (disabled by default). Closes [#49315](https://github.com/ClickHouse/ClickHouse/issues/49315). [#50635](https://github.com/ClickHouse/ClickHouse/pull/50635) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Functions "toDateOrDefault|OrNull()" and "accuateCast[OrDefault|OrNull]()" now correctly parse numeric arguments. [#50709](https://github.com/ClickHouse/ClickHouse/pull/50709) ([Dmitry Kardymon](https://github.com/kardymonds)).
|
||||||
|
* Currently, the csv input format can not parse the csv file with whitespace or \t field delimiter, and these delimiters is supported in spark. [#50712](https://github.com/ClickHouse/ClickHouse/pull/50712) ([KevinyhZou](https://github.com/KevinyhZou)).
|
||||||
|
* Settings `number_of_mutations_to_delay` and `number_of_mutations_to_throw` are enabled by default now with values 500 and 1000 respectively. [#50726](https://github.com/ClickHouse/ClickHouse/pull/50726) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Keeper improvement: add feature flags for Keeper API. Each feature flag can be disabled or enabled by defining it under `keeper_server.feature_flags` config. E.g. to enable `CheckNotExists` request, `keeper_server.feature_flags.check_not_exists` should be set to `1` on Keeper. [#50796](https://github.com/ClickHouse/ClickHouse/pull/50796) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* The dashboard correctly shows missing values. This closes [#50831](https://github.com/ClickHouse/ClickHouse/issues/50831). [#50832](https://github.com/ClickHouse/ClickHouse/pull/50832) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* CGroups metrics related to CPU are replaced with one metric, `CGroupMaxCPU` for better usability. The `Normalized` CPU usage metrics will be normalized to CGroups limits instead of the total number of CPUs when they are set. This closes [#50836](https://github.com/ClickHouse/ClickHouse/issues/50836). [#50835](https://github.com/ClickHouse/ClickHouse/pull/50835) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Relax the thresholds for "too many parts" to be more modern. Return the backpressure during long-running insert queries. [#50856](https://github.com/ClickHouse/ClickHouse/pull/50856) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Added the possibility to use date and time arguments in syslog timestamp format in functions parseDateTimeBestEffort*() and parseDateTime64BestEffort*(). [#50925](https://github.com/ClickHouse/ClickHouse/pull/50925) ([Victor Krasnov](https://github.com/sirvickr)).
|
||||||
|
* Suggest using `APPEND` or `TRUNCATE` for `INTO OUTFILE` when file exists. [#50950](https://github.com/ClickHouse/ClickHouse/pull/50950) ([alekar](https://github.com/alekar)).
|
||||||
|
* Add embedded keeper-client to standalone keeper binary. [#50964](https://github.com/ClickHouse/ClickHouse/pull/50964) ([pufit](https://github.com/pufit)).
|
||||||
|
* Command line parameter "--password" in clickhouse-client can now be specified only once. [#50966](https://github.com/ClickHouse/ClickHouse/pull/50966) ([Alexey Gerasimchuck](https://github.com/Demilivor)).
|
||||||
|
* Fix data lakes slowness because of synchronous head requests. (Related to Iceberg/Deltalake/Hudi being slow with a lot of files). [#50976](https://github.com/ClickHouse/ClickHouse/pull/50976) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Use `hash_of_all_files` from `system.parts` to check identity of parts during on-cluster backups. [#50997](https://github.com/ClickHouse/ClickHouse/pull/50997) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||||
|
* The system table zookeeper_connection connected_time identifies the time when the connection is established (standard format), and session_uptime_elapsed_seconds is added, which labels the duration of the established connection session (in seconds). [#51026](https://github.com/ClickHouse/ClickHouse/pull/51026) ([郭小龙](https://github.com/guoxiaolongzte)).
|
||||||
|
* Show halves of checksums in `system.parts`, `system.projection_parts` and in error messages in the correct order. [#51040](https://github.com/ClickHouse/ClickHouse/pull/51040) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||||
|
* Do not replicate `ALTER PARTITION` queries and mutations through `Replicated` database if it has only one shard and the underlying table is `ReplicatedMergeTree`. [#51049](https://github.com/ClickHouse/ClickHouse/pull/51049) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Improve the progress bar for file/s3/hdfs/url table functions by using chunk size from source data and using incremental total size counting in each thread. Fix the progress bar for *Cluster functions. This closes [#47250](https://github.com/ClickHouse/ClickHouse/issues/47250). [#51088](https://github.com/ClickHouse/ClickHouse/pull/51088) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Add total_bytes_to_read to Progress packet in TCP protocol for better Progress bar. [#51158](https://github.com/ClickHouse/ClickHouse/pull/51158) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Better checking of data parts on disks with filesystem cache. [#51164](https://github.com/ClickHouse/ClickHouse/pull/51164) ([Anton Popov](https://github.com/CurtizJ)).
|
||||||
|
* Disable cache setting `do_not_evict_index_and_mark_files` (Was enabled in `23.5`). [#51222](https://github.com/ClickHouse/ClickHouse/pull/51222) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Fix sometimes not correct current_elements_num in fs cache. [#51242](https://github.com/ClickHouse/ClickHouse/pull/51242) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* Add random sleep before merges/mutations execution to split load more evenly between replicas in case of zero-copy replication. [#51282](https://github.com/ClickHouse/ClickHouse/pull/51282) ([alesapin](https://github.com/alesapin)).
|
||||||
|
* The function `transform` as well as `CASE` with value matching started to support all data types. This closes [#29730](https://github.com/ClickHouse/ClickHouse/issues/29730). This closes [#32387](https://github.com/ClickHouse/ClickHouse/issues/32387). This closes [#50827](https://github.com/ClickHouse/ClickHouse/issues/50827). This closes [#31336](https://github.com/ClickHouse/ClickHouse/issues/31336). This closes [#40493](https://github.com/ClickHouse/ClickHouse/issues/40493). [#51351](https://github.com/ClickHouse/ClickHouse/pull/51351) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* We have found a bug in LLVM that makes the usage of `compile_expressions` setting unsafe. It is disabled by default. [#51368](https://github.com/ClickHouse/ClickHouse/pull/51368) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* Issue [#50220](https://github.com/ClickHouse/ClickHouse/issues/50220) reports a core in `grace_hash` join. We finally reproduce the exception on local, and found that the issue is related to the failure of creating temporary file. Somehow this is triggered in https://github.com/ClickHouse/ClickHouse/pull/49816 https://github.com/ClickHouse/ClickHouse/pull/49483. [#51382](https://github.com/ClickHouse/ClickHouse/pull/51382) ([lgbo](https://github.com/lgbo-ustc)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Update contrib/re2 to 2023-06-02. [#50949](https://github.com/ClickHouse/ClickHouse/pull/50949) ([Yuriy Chernyshov](https://github.com/georgthegreat)).
|
||||||
|
* ClickHouse server will print the list of changed settings on fatal errors. This closes [#51137](https://github.com/ClickHouse/ClickHouse/issues/51137). [#51138](https://github.com/ClickHouse/ClickHouse/pull/51138) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* In https://github.com/ClickHouse/ClickHouse/pull/51143 the fasstests failed, but the status wasn't created because of the chown `file not found`. This addresses it. Decrease the default values for `http-max-field-value-size` and `http_max_field_name_size` to 128K. [#51163](https://github.com/ClickHouse/ClickHouse/pull/51163) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Update Ubuntu version in docker containers. [#51180](https://github.com/ClickHouse/ClickHouse/pull/51180) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Allow building ClickHouse with clang-17. [#51300](https://github.com/ClickHouse/ClickHouse/pull/51300) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
* [SQLancer](https://github.com/sqlancer/sqlancer) check is considered stable as bugs that were triggered by it are fixed. Now failures of SQLancer check will be reported as failed check status. [#51340](https://github.com/ClickHouse/ClickHouse/pull/51340) ([Ilya Yatsishin](https://github.com/qoega)).
|
||||||
|
* Making our CI even better. [#51494](https://github.com/ClickHouse/ClickHouse/pull/51494) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
|
||||||
|
* Split huge `RUN` in Dockerfile into smaller conditional. Install the necessary tools on demand in the same `RUN` layer, and remove them after that. Upgrade the OS only once at the beginning. Use a modern way to check the signed repository. Downgrade the base repo to ubuntu:20.04 to address the issues on older docker versions. Upgrade golang version to address golang vulnerabilities. [#51504](https://github.com/ClickHouse/ClickHouse/pull/51504) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* This a follow-up for [#51504](https://github.com/ClickHouse/ClickHouse/issues/51504), the cleanup was lost during refactoring. [#51564](https://github.com/ClickHouse/ClickHouse/pull/51564) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in an official stable release)
|
||||||
|
|
||||||
|
* Report loading status for executable dictionaries correctly [#48775](https://github.com/ClickHouse/ClickHouse/pull/48775) ([Anton Kozlov](https://github.com/tonickkozlov)).
|
||||||
|
* Proper mutation of skip indices and projections [#50104](https://github.com/ClickHouse/ClickHouse/pull/50104) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Cleanup moving parts [#50489](https://github.com/ClickHouse/ClickHouse/pull/50489) ([vdimir](https://github.com/vdimir)).
|
||||||
|
* Fix backward compatibility for IP types hashing in aggregate functions [#50551](https://github.com/ClickHouse/ClickHouse/pull/50551) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
|
||||||
|
* Fix Log family table return wrong rows count after truncate [#50585](https://github.com/ClickHouse/ClickHouse/pull/50585) ([flynn](https://github.com/ucasfl)).
|
||||||
|
* Fix bug in `uniqExact` parallel merging [#50590](https://github.com/ClickHouse/ClickHouse/pull/50590) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
* Revert recent grace hash join changes [#50699](https://github.com/ClickHouse/ClickHouse/pull/50699) ([vdimir](https://github.com/vdimir)).
|
||||||
|
* Query Cache: Try to fix bad cast from ColumnConst to ColumnVector<char8_t> [#50704](https://github.com/ClickHouse/ClickHouse/pull/50704) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Do not read all the columns from right GLOBAL JOIN table. [#50721](https://github.com/ClickHouse/ClickHouse/pull/50721) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
* Avoid storing logs in Keeper containing unknown operation [#50751](https://github.com/ClickHouse/ClickHouse/pull/50751) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* SummingMergeTree support for DateTime64 [#50797](https://github.com/ClickHouse/ClickHouse/pull/50797) ([Jordi Villar](https://github.com/jrdi)).
|
||||||
|
* Add compat setting for non-const timezones [#50834](https://github.com/ClickHouse/ClickHouse/pull/50834) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Fix type of LDAP server params hash in cache entry [#50865](https://github.com/ClickHouse/ClickHouse/pull/50865) ([Julian Maicher](https://github.com/jmaicher)).
|
||||||
|
* Fallback to parsing big integer from String instead of exception in Parquet format [#50873](https://github.com/ClickHouse/ClickHouse/pull/50873) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Fix checking the lock file too often while writing a backup [#50889](https://github.com/ClickHouse/ClickHouse/pull/50889) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||||
|
* Do not apply projection if read-in-order was enabled. [#50923](https://github.com/ClickHouse/ClickHouse/pull/50923) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
* Fix race azure blob storage iterator [#50936](https://github.com/ClickHouse/ClickHouse/pull/50936) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
|
||||||
|
* Fix erroneous `sort_description` propagation in `CreatingSets` [#50955](https://github.com/ClickHouse/ClickHouse/pull/50955) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
* Fix iceberg V2 optional metadata parsing [#50974](https://github.com/ClickHouse/ClickHouse/pull/50974) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
* MaterializedMySQL: Keep parentheses for empty table overrides [#50977](https://github.com/ClickHouse/ClickHouse/pull/50977) ([Val Doroshchuk](https://github.com/valbok)).
|
||||||
|
* Fix crash in BackupCoordinationStageSync::setError() [#51012](https://github.com/ClickHouse/ClickHouse/pull/51012) ([Vitaly Baranov](https://github.com/vitlibar)).
|
||||||
|
* Fix subtly broken copy-on-write of ColumnLowCardinality dictionary [#51064](https://github.com/ClickHouse/ClickHouse/pull/51064) ([Michael Kolupaev](https://github.com/al13n321)).
|
||||||
|
* Generate safe IVs [#51086](https://github.com/ClickHouse/ClickHouse/pull/51086) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
|
||||||
|
* Fix ineffective query cache for SELECTs with subqueries [#51132](https://github.com/ClickHouse/ClickHouse/pull/51132) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Fix Set index with constant nullable comparison. [#51205](https://github.com/ClickHouse/ClickHouse/pull/51205) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
|
||||||
|
* Fix a crash in s3 and s3Cluster functions [#51209](https://github.com/ClickHouse/ClickHouse/pull/51209) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||||
|
* Fix core dump when compile expression [#51231](https://github.com/ClickHouse/ClickHouse/pull/51231) ([LiuNeng](https://github.com/liuneng1994)).
* Fix use-after-free in StorageURL when switching URLs [#51260](https://github.com/ClickHouse/ClickHouse/pull/51260) ([Michael Kolupaev](https://github.com/al13n321)).
* Updated check for parameterized view [#51272](https://github.com/ClickHouse/ClickHouse/pull/51272) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Fix multiple writing of same file to backup [#51299](https://github.com/ClickHouse/ClickHouse/pull/51299) ([Vitaly Baranov](https://github.com/vitlibar)).
* Fix fuzzer failure in ActionsDAG [#51301](https://github.com/ClickHouse/ClickHouse/pull/51301) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Remove garbage from function `transform` [#51350](https://github.com/ClickHouse/ClickHouse/pull/51350) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix MSan report in lowerUTF8/upperUTF8 [#51371](https://github.com/ClickHouse/ClickHouse/pull/51371) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* fs cache: fix a bit incorrect use_count after [#44985](https://github.com/ClickHouse/ClickHouse/issues/44985) [#51406](https://github.com/ClickHouse/ClickHouse/pull/51406) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix segfault in MathUnary [#51499](https://github.com/ClickHouse/ClickHouse/pull/51499) ([Ilya Yatsishin](https://github.com/qoega)).
* Fix logical assert in `tupleElement()` with default values [#51534](https://github.com/ClickHouse/ClickHouse/pull/51534) ([Robert Schulze](https://github.com/rschu1ze)).
* fs cache: remove file from opened file cache immediately when evicting file [#51596](https://github.com/ClickHouse/ClickHouse/pull/51596) ([Kseniia Sumarokova](https://github.com/kssenii)).
#### NOT FOR CHANGELOG / INSIGNIFICANT
* Deprecate delete-on-destroy.txt [#49181](https://github.com/ClickHouse/ClickHouse/pull/49181) ([Alexander Gololobov](https://github.com/davenger)).
* Attempt to increase the general runners' survival rate [#49283](https://github.com/ClickHouse/ClickHouse/pull/49283) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Refactor subqueries for IN [#49570](https://github.com/ClickHouse/ClickHouse/pull/49570) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Test plan optimization analyzer [#50095](https://github.com/ClickHouse/ClickHouse/pull/50095) ([Igor Nikonov](https://github.com/devcrafter)).
* Implement endianness-independent serialization for quantileTiming [#50324](https://github.com/ClickHouse/ClickHouse/pull/50324) ([ltrk2](https://github.com/ltrk2)).
* require `finalize()` call before d-tor for all writes buffers [#50395](https://github.com/ClickHouse/ClickHouse/pull/50395) ([Sema Checherinda](https://github.com/CheSema)).
* Implement big-endian support for the deterministic reservoir sampler [#50405](https://github.com/ClickHouse/ClickHouse/pull/50405) ([ltrk2](https://github.com/ltrk2)).
* Fix compilation error on big-endian platforms [#50406](https://github.com/ClickHouse/ClickHouse/pull/50406) ([ltrk2](https://github.com/ltrk2)).
* Attach gdb in stateless tests [#50487](https://github.com/ClickHouse/ClickHouse/pull/50487) ([Kruglov Pavel](https://github.com/Avogar)).
* JIT infrastructure refactoring [#50531](https://github.com/ClickHouse/ClickHouse/pull/50531) ([Maksim Kita](https://github.com/kitaisreal)).
* Analyzer: Do not apply Query Tree optimizations on shards [#50584](https://github.com/ClickHouse/ClickHouse/pull/50584) ([Dmitry Novik](https://github.com/novikd)).
* Increase max array size in group bitmap [#50620](https://github.com/ClickHouse/ClickHouse/pull/50620) ([Kruglov Pavel](https://github.com/Avogar)).
* Misc Annoy index improvements [#50661](https://github.com/ClickHouse/ClickHouse/pull/50661) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix reading negative decimals in avro format [#50668](https://github.com/ClickHouse/ClickHouse/pull/50668) ([Kruglov Pavel](https://github.com/Avogar)).
* Unify priorities for connection pools [#50675](https://github.com/ClickHouse/ClickHouse/pull/50675) ([Sergei Trifonov](https://github.com/serxa)).
* Postpone check of outdated parts [#50676](https://github.com/ClickHouse/ClickHouse/pull/50676) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Unify priorities: `IExecutableTask`s [#50677](https://github.com/ClickHouse/ClickHouse/pull/50677) ([Sergei Trifonov](https://github.com/serxa)).
* Disable grace_hash join in stress tests [#50693](https://github.com/ClickHouse/ClickHouse/pull/50693) ([vdimir](https://github.com/vdimir)).
* ReverseTransform small improvement [#50698](https://github.com/ClickHouse/ClickHouse/pull/50698) ([Maksim Kita](https://github.com/kitaisreal)).
* Support OPTIMIZE for temporary tables [#50710](https://github.com/ClickHouse/ClickHouse/pull/50710) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Refactor reading from object storages [#50711](https://github.com/ClickHouse/ClickHouse/pull/50711) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix data race in log message of cached buffer [#50723](https://github.com/ClickHouse/ClickHouse/pull/50723) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Add new keywords into projections documentation [#50743](https://github.com/ClickHouse/ClickHouse/pull/50743) ([YalalovSM](https://github.com/YalalovSM)).
* Fix build for aarch64 (temporary disable azure) [#50770](https://github.com/ClickHouse/ClickHouse/pull/50770) ([alesapin](https://github.com/alesapin)).
* Update version after release [#50772](https://github.com/ClickHouse/ClickHouse/pull/50772) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Update version_date.tsv and changelogs after v23.5.1.3174-stable [#50774](https://github.com/ClickHouse/ClickHouse/pull/50774) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update CHANGELOG.md [#50788](https://github.com/ClickHouse/ClickHouse/pull/50788) ([Ilya Yatsishin](https://github.com/qoega)).
* Update version_date.tsv and changelogs after v23.2.7.32-stable [#50809](https://github.com/ClickHouse/ClickHouse/pull/50809) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Desctructing --> Destructing [#50810](https://github.com/ClickHouse/ClickHouse/pull/50810) ([Robert Schulze](https://github.com/rschu1ze)).
* Don't mark a part as broken on `Poco::TimeoutException` [#50811](https://github.com/ClickHouse/ClickHouse/pull/50811) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Rename azure_blob_storage to azureBlobStorage [#50812](https://github.com/ClickHouse/ClickHouse/pull/50812) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Fix ParallelReadBuffer seek [#50820](https://github.com/ClickHouse/ClickHouse/pull/50820) ([Michael Kolupaev](https://github.com/al13n321)).
* [RFC] Print git hash when crashing [#50823](https://github.com/ClickHouse/ClickHouse/pull/50823) ([Michael Kolupaev](https://github.com/al13n321)).
* Add tests for function "transform" [#50833](https://github.com/ClickHouse/ClickHouse/pull/50833) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Update version_date.tsv and changelogs after v23.5.2.7-stable [#50844](https://github.com/ClickHouse/ClickHouse/pull/50844) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Updated changelog with azureBlobStorage table function & engine entry [#50850](https://github.com/ClickHouse/ClickHouse/pull/50850) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Update easy_tasks_sorted_ru.md [#50853](https://github.com/ClickHouse/ClickHouse/pull/50853) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Document x86 / ARM prerequisites for Docker image [#50867](https://github.com/ClickHouse/ClickHouse/pull/50867) ([Robert Schulze](https://github.com/rschu1ze)).
* MaterializedMySQL: Add test_named_collections [#50874](https://github.com/ClickHouse/ClickHouse/pull/50874) ([Val Doroshchuk](https://github.com/valbok)).
* Update version_date.tsv and changelogs after v22.8.18.31-lts [#50881](https://github.com/ClickHouse/ClickHouse/pull/50881) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update version_date.tsv and changelogs after v23.3.3.52-lts [#50882](https://github.com/ClickHouse/ClickHouse/pull/50882) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update version_date.tsv and changelogs after v23.4.3.48-stable [#50883](https://github.com/ClickHouse/ClickHouse/pull/50883) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* MaterializedMySQL: Add additional test case to insert_with_modify_binlog_checksum [#50884](https://github.com/ClickHouse/ClickHouse/pull/50884) ([Val Doroshchuk](https://github.com/valbok)).
* Update broken tests list [#50886](https://github.com/ClickHouse/ClickHouse/pull/50886) ([Dmitry Novik](https://github.com/novikd)).
* Fix LOGICAL_ERROR in snowflakeToDateTime*() [#50893](https://github.com/ClickHouse/ClickHouse/pull/50893) ([Robert Schulze](https://github.com/rschu1ze)).
* Tests with parallel replicas are no more "always green" [#50896](https://github.com/ClickHouse/ClickHouse/pull/50896) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* Slightly more information in error message about cached disk [#50897](https://github.com/ClickHouse/ClickHouse/pull/50897) ([Michael Kolupaev](https://github.com/al13n321)).
* do not call finalize after exception [#50907](https://github.com/ClickHouse/ClickHouse/pull/50907) ([Sema Checherinda](https://github.com/CheSema)).
* Update Annoy docs [#50912](https://github.com/ClickHouse/ClickHouse/pull/50912) ([Robert Schulze](https://github.com/rschu1ze)).
* A bit safer UserDefinedSQLFunctionVisitor [#50913](https://github.com/ClickHouse/ClickHouse/pull/50913) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Update contrib/orc in .gitmodules [#50920](https://github.com/ClickHouse/ClickHouse/pull/50920) ([San](https://github.com/santrancisco)).
* MaterializedMySQL: Add missing DROP DATABASE for tests [#50924](https://github.com/ClickHouse/ClickHouse/pull/50924) ([Val Doroshchuk](https://github.com/valbok)).
* Fix 'Illegal column timezone' in stress tests [#50929](https://github.com/ClickHouse/ClickHouse/pull/50929) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix tests sanity checks and avoid dropping system.query_log table [#50934](https://github.com/ClickHouse/ClickHouse/pull/50934) ([Azat Khuzhin](https://github.com/azat)).
* Fix tests for throttling by allowing more margin of error for throttling event [#50935](https://github.com/ClickHouse/ClickHouse/pull/50935) ([Azat Khuzhin](https://github.com/azat)).
* 01746_convert_type_with_default: Temporarily disable flaky test [#50937](https://github.com/ClickHouse/ClickHouse/pull/50937) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix the stateless tests image for old commits [#50947](https://github.com/ClickHouse/ClickHouse/pull/50947) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Fix logic in `AsynchronousBoundedReadBuffer::seek` [#50952](https://github.com/ClickHouse/ClickHouse/pull/50952) ([Nikita Taranov](https://github.com/nickitat)).
* Uncomment flaky test (01746_convert_type_with_default) [#50954](https://github.com/ClickHouse/ClickHouse/pull/50954) ([Dmitry Kardymon](https://github.com/kardymonds)).
* Fix keeper-client help message [#50965](https://github.com/ClickHouse/ClickHouse/pull/50965) ([pufit](https://github.com/pufit)).
* fix build issue on clang 15 [#50967](https://github.com/ClickHouse/ClickHouse/pull/50967) ([Chang chen](https://github.com/baibaichen)).
* Docs: Fix embedded video link [#50972](https://github.com/ClickHouse/ClickHouse/pull/50972) ([Robert Schulze](https://github.com/rschu1ze)).
* Change submodule capnproto to it's fork in ClickHouse [#50987](https://github.com/ClickHouse/ClickHouse/pull/50987) ([Kruglov Pavel](https://github.com/Avogar)).
* Attempt to make 01281_group_by_limit_memory_tracking not flaky [#50995](https://github.com/ClickHouse/ClickHouse/pull/50995) ([Dmitry Novik](https://github.com/novikd)).
* Fix flaky 02561_null_as_default_more_formats [#51001](https://github.com/ClickHouse/ClickHouse/pull/51001) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix flaky test_seekable_formats [#51002](https://github.com/ClickHouse/ClickHouse/pull/51002) ([Kruglov Pavel](https://github.com/Avogar)).
* Follow-up to [#50448](https://github.com/ClickHouse/ClickHouse/issues/50448) [#51006](https://github.com/ClickHouse/ClickHouse/pull/51006) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix a versions' tweak for tagged commits, improve version_helper [#51035](https://github.com/ClickHouse/ClickHouse/pull/51035) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Sqlancer has changed master to main [#51060](https://github.com/ClickHouse/ClickHouse/pull/51060) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Do not spam sqlancer build log [#51061](https://github.com/ClickHouse/ClickHouse/pull/51061) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Refactor IColumn::forEachSubcolumn to make it slightly harder to implement incorrectly [#51072](https://github.com/ClickHouse/ClickHouse/pull/51072) ([Michael Kolupaev](https://github.com/al13n321)).
* MaterializedMySQL: Rename materialize_with_ddl.py -> materialized_with_ddl [#51074](https://github.com/ClickHouse/ClickHouse/pull/51074) ([Val Doroshchuk](https://github.com/valbok)).
* Improve woboq browser report [#51077](https://github.com/ClickHouse/ClickHouse/pull/51077) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Fix for part_names_mutex used after destruction [#51099](https://github.com/ClickHouse/ClickHouse/pull/51099) ([Alexander Gololobov](https://github.com/davenger)).
* Fix ColumnConst::forEachSubcolumn missing from previous PR [#51102](https://github.com/ClickHouse/ClickHouse/pull/51102) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix the test 02783_parsedatetimebesteffort_syslog flakiness [#51112](https://github.com/ClickHouse/ClickHouse/pull/51112) ([Victor Krasnov](https://github.com/sirvickr)).
* Compatibility with clang-17 [#51114](https://github.com/ClickHouse/ClickHouse/pull/51114) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Make more parallel get requests to ZooKeeper in system.zookeeper [#51118](https://github.com/ClickHouse/ClickHouse/pull/51118) ([Alexander Gololobov](https://github.com/davenger)).
* Fix 02703_max_local_write_bandwidth flakiness [#51120](https://github.com/ClickHouse/ClickHouse/pull/51120) ([Azat Khuzhin](https://github.com/azat)).
* Update version_date.tsv and changelogs after v23.5.3.24-stable [#51121](https://github.com/ClickHouse/ClickHouse/pull/51121) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update version_date.tsv and changelogs after v23.4.4.16-stable [#51122](https://github.com/ClickHouse/ClickHouse/pull/51122) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update version_date.tsv and changelogs after v23.3.4.17-lts [#51123](https://github.com/ClickHouse/ClickHouse/pull/51123) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update version_date.tsv and changelogs after v22.8.19.10-lts [#51124](https://github.com/ClickHouse/ClickHouse/pull/51124) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Fix typo [#51126](https://github.com/ClickHouse/ClickHouse/pull/51126) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Slightly better diagnostics [#51127](https://github.com/ClickHouse/ClickHouse/pull/51127) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Small fix in `MergeTreePrefetchedReadPool` [#51131](https://github.com/ClickHouse/ClickHouse/pull/51131) ([Nikita Taranov](https://github.com/nickitat)).
* Don't report table function accesses to system.errors [#51147](https://github.com/ClickHouse/ClickHouse/pull/51147) ([Raúl Marín](https://github.com/Algunenano)).
* Fix SQLancer branch name [#51148](https://github.com/ClickHouse/ClickHouse/pull/51148) ([Ilya Yatsishin](https://github.com/qoega)).
* Revert "Added ability to implicitly use file/hdfs/s3 table functions in clickhouse-local" [#51149](https://github.com/ClickHouse/ClickHouse/pull/51149) ([Alexander Tokmakov](https://github.com/tavplubix)).
* More profile events for fs cache [#51161](https://github.com/ClickHouse/ClickHouse/pull/51161) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Unforget to pass callback to readBigAt() in ParallelReadBuffer [#51165](https://github.com/ClickHouse/ClickHouse/pull/51165) ([Michael Kolupaev](https://github.com/al13n321)).
* Update README.md [#51179](https://github.com/ClickHouse/ClickHouse/pull/51179) ([Tyler Hannan](https://github.com/tylerhannan)).
* Update exception message [#51187](https://github.com/ClickHouse/ClickHouse/pull/51187) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Split long test 02149_schema_inference_formats_with_schema into several tests to avoid timeout in debug [#51197](https://github.com/ClickHouse/ClickHouse/pull/51197) ([Kruglov Pavel](https://github.com/Avogar)).
* Avoid initializing DateLUT from emptyArray function registration [#51199](https://github.com/ClickHouse/ClickHouse/pull/51199) ([Alexander Gololobov](https://github.com/davenger)).
* Suppress check for covered parts in ZooKeeper [#51207](https://github.com/ClickHouse/ClickHouse/pull/51207) ([Alexander Tokmakov](https://github.com/tavplubix)).
* One more profile event for fs cache [#51223](https://github.com/ClickHouse/ClickHouse/pull/51223) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Typo: passowrd_sha256_hex --> password_sha256_hex [#51233](https://github.com/ClickHouse/ClickHouse/pull/51233) ([Robert Schulze](https://github.com/rschu1ze)).
* Introduce settings enum field with auto-generated values list [#51237](https://github.com/ClickHouse/ClickHouse/pull/51237) ([Sergei Trifonov](https://github.com/serxa)).
* Drop session if we fail to get Keeper API version [#51238](https://github.com/ClickHouse/ClickHouse/pull/51238) ([Alexander Gololobov](https://github.com/davenger)).
* Revert "Fix a crash in s3 and s3Cluster functions" [#51239](https://github.com/ClickHouse/ClickHouse/pull/51239) ([Alexander Tokmakov](https://github.com/tavplubix)).
* fix flaky `AsyncLoader` destructor [#51245](https://github.com/ClickHouse/ClickHouse/pull/51245) ([Sergei Trifonov](https://github.com/serxa)).
* Docs: little cleanup of configuration-files.md [#51249](https://github.com/ClickHouse/ClickHouse/pull/51249) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix a stupid bug on Replicated database recovery [#51252](https://github.com/ClickHouse/ClickHouse/pull/51252) ([Alexander Tokmakov](https://github.com/tavplubix)).
* FileCache: tryReserve() slight improvement [#51259](https://github.com/ClickHouse/ClickHouse/pull/51259) ([Igor Nikonov](https://github.com/devcrafter)).
* Ugly hotfix for "terminate on uncaught exception" in WriteBufferFromOStream [#51265](https://github.com/ClickHouse/ClickHouse/pull/51265) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Avoid too many calls to Poco::Logger::get [#51266](https://github.com/ClickHouse/ClickHouse/pull/51266) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Update version_date.tsv and changelogs after v23.3.5.9-lts [#51269](https://github.com/ClickHouse/ClickHouse/pull/51269) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Better reporting of broken parts [#51270](https://github.com/ClickHouse/ClickHouse/pull/51270) ([Anton Popov](https://github.com/CurtizJ)).
* Update ext-dict-functions.md [#51283](https://github.com/ClickHouse/ClickHouse/pull/51283) ([Mike Kot](https://github.com/myrrc)).
* Disable table structure check for secondary queries from Replicated db [#51284](https://github.com/ClickHouse/ClickHouse/pull/51284) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Define Thrift version for parquet and use correct arrow version [#51285](https://github.com/ClickHouse/ClickHouse/pull/51285) ([Kruglov Pavel](https://github.com/Avogar)).
* Restore Azure build on ARM [#51288](https://github.com/ClickHouse/ClickHouse/pull/51288) ([Robert Schulze](https://github.com/rschu1ze)).
* Query Cache: Un-comment settings in server cfg [#51294](https://github.com/ClickHouse/ClickHouse/pull/51294) ([Robert Schulze](https://github.com/rschu1ze)).
* Require more checks [#51295](https://github.com/ClickHouse/ClickHouse/pull/51295) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix metadata loading test [#51297](https://github.com/ClickHouse/ClickHouse/pull/51297) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Scratch the strange Python code [#51302](https://github.com/ClickHouse/ClickHouse/pull/51302) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Add a test for [#47865](https://github.com/ClickHouse/ClickHouse/issues/47865) [#51306](https://github.com/ClickHouse/ClickHouse/pull/51306) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Add a test for [#48894](https://github.com/ClickHouse/ClickHouse/issues/48894) [#51307](https://github.com/ClickHouse/ClickHouse/pull/51307) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Add a test for [#48676](https://github.com/ClickHouse/ClickHouse/issues/48676) [#51308](https://github.com/ClickHouse/ClickHouse/pull/51308) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix long test `functions_bad_arguments` [#51310](https://github.com/ClickHouse/ClickHouse/pull/51310) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Unify merge predicate [#51344](https://github.com/ClickHouse/ClickHouse/pull/51344) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix using locks in ProcessList [#51348](https://github.com/ClickHouse/ClickHouse/pull/51348) ([Vitaly Baranov](https://github.com/vitlibar)).
* Add a test for [#42631](https://github.com/ClickHouse/ClickHouse/issues/42631) [#51353](https://github.com/ClickHouse/ClickHouse/pull/51353) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix performance tests due to warnings from jemalloc about Per-CPU arena disabled [#51362](https://github.com/ClickHouse/ClickHouse/pull/51362) ([Azat Khuzhin](https://github.com/azat)).
* Fix "merge_truncate_long" test [#51369](https://github.com/ClickHouse/ClickHouse/pull/51369) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Increase timeout of Fast Test [#51372](https://github.com/ClickHouse/ClickHouse/pull/51372) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix bad tests for DNS [#51374](https://github.com/ClickHouse/ClickHouse/pull/51374) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Attempt to fix the `relax_too_many_parts` test [#51375](https://github.com/ClickHouse/ClickHouse/pull/51375) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix MySQL test in Debug mode [#51376](https://github.com/ClickHouse/ClickHouse/pull/51376) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix bad test `01018_Distributed__shard_num` [#51377](https://github.com/ClickHouse/ClickHouse/pull/51377) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix "logical error" in addressToLineWithInlines [#51379](https://github.com/ClickHouse/ClickHouse/pull/51379) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix test 01280_ttl_where_group_by [#51380](https://github.com/ClickHouse/ClickHouse/pull/51380) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Attempt to fix `test_ssl_cert_authentication` [#51384](https://github.com/ClickHouse/ClickHouse/pull/51384) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Revert "Merge pull request [#50951](https://github.com/ClickHouse/ClickHouse/issues/50951) from ZhiguoZh/20230607-toyear-fix" [#51390](https://github.com/ClickHouse/ClickHouse/pull/51390) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Two tests are twice longer in average with Analyzer and sometimes failing [#51391](https://github.com/ClickHouse/ClickHouse/pull/51391) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix 00899_long_attach_memory_limit [#51395](https://github.com/ClickHouse/ClickHouse/pull/51395) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix test 01293_optimize_final_force [#51396](https://github.com/ClickHouse/ClickHouse/pull/51396) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix test 02481_parquet_list_monotonically_increasing_offsets [#51397](https://github.com/ClickHouse/ClickHouse/pull/51397) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix test 02497_trace_events_stress_long [#51398](https://github.com/ClickHouse/ClickHouse/pull/51398) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix broken labeling for `manual approve` [#51405](https://github.com/ClickHouse/ClickHouse/pull/51405) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Fix parts lifetime in `MergeTreeTransaction` [#51407](https://github.com/ClickHouse/ClickHouse/pull/51407) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix flaky test test_skip_empty_files [#51409](https://github.com/ClickHouse/ClickHouse/pull/51409) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix flaky test test_profile_events_s3 [#51412](https://github.com/ClickHouse/ClickHouse/pull/51412) ([Sema Checherinda](https://github.com/CheSema)).
* Update README.md [#51413](https://github.com/ClickHouse/ClickHouse/pull/51413) ([Tyler Hannan](https://github.com/tylerhannan)).
* Replace try/catch logic in hasTokenOrNull() by something more lightweight [#51425](https://github.com/ClickHouse/ClickHouse/pull/51425) ([Robert Schulze](https://github.com/rschu1ze)).
* Add retries to `tlsv1_3` tests [#51434](https://github.com/ClickHouse/ClickHouse/pull/51434) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Update exception message [#51440](https://github.com/ClickHouse/ClickHouse/pull/51440) ([Kseniia Sumarokova](https://github.com/kssenii)).
* fs cache: add check for intersecting ranges [#51444](https://github.com/ClickHouse/ClickHouse/pull/51444) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Slightly better code around packets for parallel replicas [#51451](https://github.com/ClickHouse/ClickHouse/pull/51451) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* Update system_warnings test [#51453](https://github.com/ClickHouse/ClickHouse/pull/51453) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Many fixes [#51455](https://github.com/ClickHouse/ClickHouse/pull/51455) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Fix test 01605_adaptive_granularity_block_borders [#51457](https://github.com/ClickHouse/ClickHouse/pull/51457) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Try fix flaky 02497_storage_file_reader_selection [#51468](https://github.com/ClickHouse/ClickHouse/pull/51468) ([Kruglov Pavel](https://github.com/Avogar)).
* Try making Keeper in `DatabaseReplicated` tests more stable [#51473](https://github.com/ClickHouse/ClickHouse/pull/51473) ([Antonio Andelic](https://github.com/antonio2368)).
* Convert 02003_memory_limit_in_client from expect to sh test (to fix flakiness) [#51475](https://github.com/ClickHouse/ClickHouse/pull/51475) ([Azat Khuzhin](https://github.com/azat)).
* Fix test_disk_over_web_server [#51476](https://github.com/ClickHouse/ClickHouse/pull/51476) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Delay shutdown of system and temporary databases [#51479](https://github.com/ClickHouse/ClickHouse/pull/51479) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix memory leakage in CompressionCodecDeflateQpl [#51480](https://github.com/ClickHouse/ClickHouse/pull/51480) ([Vitaly Baranov](https://github.com/vitlibar)).
* Increase retries in test_multiple_disks/test.py::test_start_stop_moves [#51482](https://github.com/ClickHouse/ClickHouse/pull/51482) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix race in BoundedReadBuffer [#51484](https://github.com/ClickHouse/ClickHouse/pull/51484) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix flaky unit test [#51485](https://github.com/ClickHouse/ClickHouse/pull/51485) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix flaky test `test_host_regexp_multiple_ptr_records` [#51506](https://github.com/ClickHouse/ClickHouse/pull/51506) ([Nikolay Degterinsky](https://github.com/evillique)).
* Add a comment [#51517](https://github.com/ClickHouse/ClickHouse/pull/51517) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Make `test_ssl_cert_authentication` similar to `test_tlvs1_3` [#51520](https://github.com/ClickHouse/ClickHouse/pull/51520) ([János Benjamin Antal](https://github.com/antaljanosbenjamin)).
* Fix duplicate storage set logical error. [#51521](https://github.com/ClickHouse/ClickHouse/pull/51521) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Update test_storage_postgresql/test.py::test_concurrent_queries [#51523](https://github.com/ClickHouse/ClickHouse/pull/51523) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix FATAL: query context is not detached from thread group [#51540](https://github.com/ClickHouse/ClickHouse/pull/51540) ([Igor Nikonov](https://github.com/devcrafter)).
* Update version_date.tsv and changelogs after v23.3.6.7-lts [#51548](https://github.com/ClickHouse/ClickHouse/pull/51548) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Decoupled commits from [#51180](https://github.com/ClickHouse/ClickHouse/issues/51180) for backports [#51561](https://github.com/ClickHouse/ClickHouse/pull/51561) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Try to fix deadlock in ZooKeeper client [#51563](https://github.com/ClickHouse/ClickHouse/pull/51563) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Retry chroot creation in ZK before stateless tests [#51585](https://github.com/ClickHouse/ClickHouse/pull/51585) ([Antonio Andelic](https://github.com/antonio2368)).
* use timeout instead trap in 01443_merge_truncate_long.sh [#51593](https://github.com/ClickHouse/ClickHouse/pull/51593) ([Sema Checherinda](https://github.com/CheSema)).
* Update version_date.tsv and changelogs after v23.5.4.25-stable [#51604](https://github.com/ClickHouse/ClickHouse/pull/51604) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Fix MergeTreeMarksLoader segfaulting if marks file is longer than expected [#51636](https://github.com/ClickHouse/ClickHouse/pull/51636) ([Michael Kolupaev](https://github.com/al13n321)).
* Update version_date.tsv and changelogs after v23.4.5.22-stable [#51638](https://github.com/ClickHouse/ClickHouse/pull/51638) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update version_date.tsv and changelogs after v23.3.7.5-lts [#51639](https://github.com/ClickHouse/ClickHouse/pull/51639) ([robot-clickhouse](https://github.com/robot-clickhouse)).
* Update parts.md [#51643](https://github.com/ClickHouse/ClickHouse/pull/51643) ([Ramazan Polat](https://github.com/ramazanpolat)).
@@ -44,11 +44,12 @@ Create a table in ClickHouse which allows to read data from Redis:
 ``` sql
 CREATE TABLE redis_table
 (
-    `k` String,
-    `m` String,
-    `n` UInt32
+    `key` String,
+    `v1` UInt32,
+    `v2` String,
+    `v3` Float32
 )
-ENGINE = Redis('redis1:6379') PRIMARY KEY(k);
+ENGINE = Redis('redis1:6379') PRIMARY KEY(key);
 ```

 Insert:
@@ -111,9 +112,16 @@ Flush Redis db asynchronously. Also `Truncate` support SYNC mode.
 TRUNCATE TABLE redis_table SYNC;
 ```

+Join:
+
+Join with other tables.
+
+```
+SELECT * FROM redis_table JOIN merge_tree_table ON merge_tree_table.key=redis_table.key;
+```
+
 ## Limitations {#limitations}

 Redis engine also supports scanning queries, such as `where k > xx`, but it has some limitations:
-1. Scanning query may produce some duplicated keys in a very rare case when it is rehashing. See details in [Redis Scan](https://github.com/redis/redis/blob/e4d183afd33e0b2e6e8d1c79a832f678a04a7886/src/dict.c#L1186-L1269)
+1. Scanning query may produce some duplicated keys in a very rare case when it is rehashing. See details in [Redis Scan](https://github.com/redis/redis/blob/e4d183afd33e0b2e6e8d1c79a832f678a04a7886/src/dict.c#L1186-L1269).
 2. During the scanning, keys could be created and deleted, so the resulting dataset can not represent a valid point in time.
@@ -756,6 +756,17 @@ If you perform the `SELECT` query between merges, you may get expired data. To a
 - [ttl_only_drop_parts](/docs/en/operations/settings/settings.md/#ttl_only_drop_parts) setting

+
+## Disk types
+
+In addition to local block devices, ClickHouse supports these storage types:
+- [`s3` for S3 and MinIO](#table_engine-mergetree-s3)
+- [`gcs` for GCS](/docs/en/integrations/data-ingestion/gcs/index.md/#creating-a-disk)
+- [`blob_storage_disk` for Azure Blob Storage](#table_engine-mergetree-azure-blob-storage)
+- [`hdfs` for HDFS](#hdfs-storage)
+- [`web` for read-only from web](#web-storage)
+- [`cache` for local caching](/docs/en/operations/storing-data.md/#using-local-cache)
+- [`s3_plain` for backups to S3](/docs/en/operations/backup#backuprestore-using-an-s3-disk)
+
 ## Using Multiple Block Devices for Data Storage {#table_engine-mergetree-multiple-volumes}

 ### Introduction {#introduction}
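To make the new `## Disk types` list above a bit more tangible, here is a minimal sketch of how one such disk can be declared and wired into a storage policy. The disk and policy names, the endpoint, and the credentials are illustrative placeholders, not values taken from this commit.

```xml
<clickhouse>
    <storage_configuration>
        <disks>
            <s3_disk> <!-- hypothetical disk name -->
                <type>s3</type>
                <endpoint>https://my-bucket.s3.amazonaws.com/clickhouse/</endpoint> <!-- placeholder -->
                <access_key_id>...</access_key_id>
                <secret_access_key>...</secret_access_key>
            </s3_disk>
        </disks>
        <policies>
            <s3_main>
                <volumes>
                    <main>
                        <disk>s3_disk</disk>
                    </main>
                </volumes>
            </s3_main>
        </policies>
    </storage_configuration>
</clickhouse>
```

A table can then opt into the policy with `SETTINGS storage_policy = 's3_main'` in its `CREATE TABLE` statement.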
@ -936,7 +947,16 @@ configuration files; all the settings are in the CREATE/ATTACH query.
|
|||||||
The example uses `type=web`, but any disk type can be configured as dynamic, even Local disk. Local disks require a path argument to be inside the server config parameter `custom_local_disks_base_directory`, which has no default, so set that also when using local disk.
|
The example uses `type=web`, but any disk type can be configured as dynamic, even Local disk. Local disks require a path argument to be inside the server config parameter `custom_local_disks_base_directory`, which has no default, so set that also when using local disk.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
#### Example dynamic web storage
|
||||||
|
|
||||||
|
:::tip
|
||||||
|
A [demo dataset](https://github.com/ClickHouse/web-tables-demo) is hosted in GitHub. To prepare your own tables for web storage see the tool [clickhouse-static-files-uploader](/docs/en/operations/storing-data.md/#storing-data-on-webserver)
|
||||||
|
:::
|
||||||
|
|
||||||
|
In this `ATTACH TABLE` query the `UUID` provided matches the directory name of the data, and the endpoint is the URL for the raw GitHub content.
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
|
# highlight-next-line
|
||||||
ATTACH TABLE uk_price_paid UUID 'cf712b4f-2ca8-435c-ac23-c4393efe52f7'
|
ATTACH TABLE uk_price_paid UUID 'cf712b4f-2ca8-435c-ac23-c4393efe52f7'
|
||||||
(
|
(
|
||||||
price UInt32,
|
price UInt32,
|
||||||
@ -1238,6 +1258,93 @@ Examples of working configurations can be found in integration tests directory (
|
|||||||
Zero-copy replication is disabled by default in ClickHouse version 22.8 and higher. This feature is not recommended for production use.
|
Zero-copy replication is disabled by default in ClickHouse version 22.8 and higher. This feature is not recommended for production use.
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
## HDFS storage {#hdfs-storage}
|
||||||
|
|
||||||
|
In this sample configuration:
|
||||||
|
- the disk is of type `hdfs`
|
||||||
|
- the data is hosted at `hdfs://hdfs1:9000/clickhouse/`
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<clickhouse>
|
||||||
|
<storage_configuration>
|
||||||
|
<disks>
|
||||||
|
<hdfs>
|
||||||
|
<type>hdfs</type>
|
||||||
|
<endpoint>hdfs://hdfs1:9000/clickhouse/</endpoint>
|
||||||
|
<skip_access_check>true</skip_access_check>
|
||||||
|
</hdfs>
|
||||||
|
<hdd>
|
||||||
|
<type>local</type>
|
||||||
|
<path>/</path>
|
||||||
|
</hdd>
|
||||||
|
</disks>
|
||||||
|
<policies>
|
||||||
|
<hdfs>
|
||||||
|
<volumes>
|
||||||
|
<main>
|
||||||
|
<disk>hdfs</disk>
|
||||||
|
</main>
|
||||||
|
<external>
|
||||||
|
<disk>hdd</disk>
|
||||||
|
</external>
|
||||||
|
</volumes>
|
||||||
|
</hdfs>
|
||||||
|
</policies>
|
||||||
|
</storage_configuration>
|
||||||
|
</clickhouse>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Web storage (read-only) {#web-storage}
|
||||||
|
|
||||||
|
Web storage can be used for read-only purposes. An example use is for hosting sample
|
||||||
|
data, or for migrating data.
|
||||||
|
|
||||||
|
:::tip
|
||||||
|
Storage can also be configured temporarily within a query, if a web dataset is not expected
|
||||||
|
to be used routinely, see [dynamic storage](#dynamic-storage) and skip editing the
|
||||||
|
configuration file.
|
||||||
|
:::
|
||||||
|
|
||||||
|
In this sample configuration:
|
||||||
|
- the disk is of type `web`
|
||||||
|
- the data is hosted at `http://nginx:80/test1/`
|
||||||
|
- a cache on local storage is used
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<clickhouse>
|
||||||
|
<storage_configuration>
|
||||||
|
<disks>
|
||||||
|
<web>
|
||||||
|
<type>web</type>
|
||||||
|
<endpoint>http://nginx:80/test1/</endpoint>
|
||||||
|
</web>
|
||||||
|
<cached_web>
|
||||||
|
<type>cache</type>
|
||||||
|
<disk>web</disk>
|
||||||
|
<path>cached_web_cache/</path>
|
||||||
|
<max_size>100000000</max_size>
|
||||||
|
</cached_web>
|
||||||
|
</disks>
|
||||||
|
<policies>
|
||||||
|
<web>
|
||||||
|
<volumes>
|
||||||
|
<main>
|
||||||
|
<disk>web</disk>
|
||||||
|
</main>
|
||||||
|
</volumes>
|
||||||
|
</web>
|
||||||
|
<cached_web>
|
||||||
|
<volumes>
|
||||||
|
<main>
|
||||||
|
<disk>cached_web</disk>
|
||||||
|
</main>
|
||||||
|
</volumes>
|
||||||
|
</cached_web>
|
||||||
|
</policies>
|
||||||
|
</storage_configuration>
|
||||||
|
</clickhouse>
|
||||||
|
```
|
||||||
|
|
||||||
## Virtual Columns {#virtual-columns}
|
## Virtual Columns {#virtual-columns}
|
||||||
|
|
||||||
- `_part` — Name of a part.
|
- `_part` — Name of a part.
|
||||||
|
@@ -378,6 +378,10 @@ request](https://github.com/ClickHouse/ClickHouse/commits/master) and find CI ch
 https://s3.amazonaws.com/clickhouse/builds/PRs/.../.../binary_aarch64_v80compat/clickhouse". You can then click the link to download the
 build.

+### macOS-only: Install with Homebrew
+
+To install ClickHouse using the popular `brew` package manager, follow the instructions listed in the [ClickHouse Homebrew tap](https://github.com/ClickHouse/homebrew-clickhouse).
+
 ## Launch {#launch}

 To start the server as a daemon, run:
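Returning to the Homebrew note added above: the tap's README is the authoritative source for the exact command, but at the time of writing it amounts to something like the following (the cask name `clickhouse` is an assumption here, so verify it against the tap before copying):

```bash
# Install ClickHouse from the community tap (check the tap's README first).
brew install --cask clickhouse
```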
|
@@ -2454,18 +2454,22 @@ In this format, all input data is read to a single value. It is possible to pars
 The result is output in binary format without delimiters and escaping. If more than one value is output, the format is ambiguous, and it will be impossible to read the data back.

 Below is a comparison of the formats `RawBLOB` and [TabSeparatedRaw](#tabseparatedraw).

 `RawBLOB`:
 - data is output in binary format, no escaping;
 - there are no delimiters between values;
 - no newline at the end of each value.
-[TabSeparatedRaw] (#tabseparatedraw):
+
+`TabSeparatedRaw`:
 - data is output without escaping;
 - the rows contain values separated by tabs;
 - there is a line feed after the last value in every row.

 The following is a comparison of the `RawBLOB` and [RowBinary](#rowbinary) formats.

 `RawBLOB`:
 - String fields are output without being prefixed by length.

 `RowBinary`:
 - String fields are represented as length in varint format (unsigned [LEB128] (https://en.wikipedia.org/wiki/LEB128)), followed by the bytes of the string.
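A small round-trip sketch may make the `RawBLOB` comparison above more concrete. The database, table, and file names are made up for illustration:

```bash
# Store a whole file as a single String value, then read it back byte-for-byte.
clickhouse-client --query "CREATE TABLE test.raw_blob (data String) ENGINE = Memory"
cat image.png | clickhouse-client --query "INSERT INTO test.raw_blob FORMAT RawBLOB"
clickhouse-client --query "SELECT data FROM test.raw_blob FORMAT RawBLOB" > image_copy.png
```

Because `RawBLOB` writes no delimiters, the export step only makes sense when exactly one value is selected.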
|
@@ -1602,7 +1602,7 @@ Keys for server/client settings:
 - requireTLSv1_2 (default: false) – Require a TLSv1.2 connection. Acceptable values: `true`, `false`.
 - fips (default: false) – Activates OpenSSL FIPS mode. Supported if the library’s OpenSSL version supports FIPS.
 - privateKeyPassphraseHandler (default: `KeyConsoleHandler`)– Class (PrivateKeyPassphraseHandler subclass) that requests the passphrase for accessing the private key. For example: `<privateKeyPassphraseHandler>`, `<name>KeyFileHandler</name>`, `<options><password>test</password></options>`, `</privateKeyPassphraseHandler>`.
-- invalidCertificateHandler (default: `ConsoleCertificateHandler`) – Class (a subclass of CertificateHandler) for verifying invalid certificates. For example: `<invalidCertificateHandler> <name>ConsoleCertificateHandler</name> </invalidCertificateHandler>` .
+- invalidCertificateHandler (default: `RejectCertificateHandler`) – Class (a subclass of CertificateHandler) for verifying invalid certificates. For example: `<invalidCertificateHandler> <name>RejectCertificateHandler</name> </invalidCertificateHandler>` .
 - disableProtocols (default: "") – Protocols that are not allowed to use.
 - preferServerCiphers (default: false) – Preferred server ciphers on the client.
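For orientation, here is a hedged sketch of how these keys sit inside an `openSSL` configuration section, using the new `RejectCertificateHandler` default mentioned above; the values are illustrative, not recommendations from this commit:

```xml
<openSSL>
    <client>
        <loadDefaultCAFile>true</loadDefaultCAFile>
        <requireTLSv1_2>true</requireTLSv1_2>
        <disableProtocols>sslv2,sslv3,tlsv1,tlsv1_1</disableProtocols>
        <preferServerCiphers>true</preferServerCiphers>
        <invalidCertificateHandler>
            <name>RejectCertificateHandler</name>
        </invalidCertificateHandler>
    </client>
</openSSL>
```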
|
|
||||||
@ -2120,6 +2120,12 @@ This section contains the following parameters:
|
|||||||
- `operation_timeout_ms` — Maximum timeout for one operation in milliseconds.
|
- `operation_timeout_ms` — Maximum timeout for one operation in milliseconds.
|
||||||
- `root` — The [znode](http://zookeeper.apache.org/doc/r3.5.5/zookeeperOver.html#Nodes+and+ephemeral+nodes) that is used as the root for znodes used by the ClickHouse server. Optional.
|
- `root` — The [znode](http://zookeeper.apache.org/doc/r3.5.5/zookeeperOver.html#Nodes+and+ephemeral+nodes) that is used as the root for znodes used by the ClickHouse server. Optional.
|
||||||
- `identity` — User and password, that can be required by ZooKeeper to give access to requested znodes. Optional.
|
- `identity` — User and password, that can be required by ZooKeeper to give access to requested znodes. Optional.
|
||||||
|
- zookeeper_load_balancing - Specifies the algorithm of ZooKeeper node selection.
|
||||||
|
* random - randomly selects one of ZooKeeper nodes.
|
||||||
|
* in_order - selects the first ZooKeeper node, if it's not available then the second, and so on.
|
||||||
|
* nearest_hostname - selects a ZooKeeper node with a hostname that is most similar to the server’s hostname.
|
||||||
|
* first_or_random - selects the first ZooKeeper node, if it's not available then randomly selects one of remaining ZooKeeper nodes.
|
||||||
|
* round_robin - selects the first ZooKeeper node, if reconnection happens selects the next.
|
||||||
|
|
||||||
**Example configuration**
|
**Example configuration**
|
||||||
|
|
||||||
@ -2139,6 +2145,8 @@ This section contains the following parameters:
|
|||||||
<root>/path/to/zookeeper/node</root>
|
<root>/path/to/zookeeper/node</root>
|
||||||
<!-- Optional. Zookeeper digest ACL string. -->
|
<!-- Optional. Zookeeper digest ACL string. -->
|
||||||
<identity>user:password</identity>
|
<identity>user:password</identity>
|
||||||
|
<!--<zookeeper_load_balancing>random / in_order / nearest_hostname / first_or_random / round_robin</zookeeper_load_balancing>-->
|
||||||
|
<zookeeper_load_balancing>random</zookeeper_load_balancing>
|
||||||
</zookeeper>
|
</zookeeper>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -1322,7 +1322,7 @@ Connection pool size for PostgreSQL table engine and database engine.
|
|||||||
|
|
||||||
Default value: 16
|
Default value: 16
|
||||||
|
|
||||||
## postgresql_connection_pool_size {#postgresql-connection-pool-size}
|
## postgresql_connection_pool_wait_timeout {#postgresql-connection-pool-wait-timeout}
|
||||||
|
|
||||||
Connection pool push/pop timeout on empty pool for PostgreSQL table engine and database engine. By default it will block on empty pool.
|
Connection pool push/pop timeout on empty pool for PostgreSQL table engine and database engine. By default it will block on empty pool.
|
||||||
|
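If the renamed `postgresql_connection_pool_wait_timeout` setting needs tuning, it should be adjustable like any other user-level setting; the value below is an assumption used purely for illustration, on the assumption that the unit is milliseconds as for comparable pool settings:

```sql
-- Wait up to 5 seconds for a free PostgreSQL connection instead of blocking.
SET postgresql_connection_pool_wait_timeout = 5000;
```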
|
||||||
|
@ -184,13 +184,15 @@ These settings should be defined in the disk configuration section.
|
|||||||
|
|
||||||
- `enable_filesystem_query_cache_limit` - allow to limit the size of cache which is downloaded within each query (depends on user setting `max_query_cache_size`). Default: `false`.
|
- `enable_filesystem_query_cache_limit` - allow to limit the size of cache which is downloaded within each query (depends on user setting `max_query_cache_size`). Default: `false`.
|
||||||
|
|
||||||
- `enable_cache_hits_threshold` - number which defines how many times some data needs to be read before it will be cached. Default: `0`, e.g. the data is cached at the first attempt to read it.
|
- `enable_cache_hits_threshold` - number which defines how many times some data needs to be read before it will be cached. Default: `false`. This threshold can be defined by `cache_hits_threshold`. Default: `0`, e.g. the data is cached at the first attempt to read it.
|
||||||
|
|
||||||
|
- `enable_bypass_cache_with_threshold` - allows to skip cache completely in case the requested read range exceeds the threshold. Default: `false`. This threshold can be defined by `bypass_cache_threashold`. Default: `268435456` (`256Mi`).
|
||||||
|
|
||||||
- `do_not_evict_index_and_mark_files` - do not evict small frequently used files according to cache policy. Default: `false`. This setting was added in version 22.8. If you used filesystem cache before this version, then it will not work on versions starting from 22.8 if this setting is set to `true`. If you want to use this setting, clear old cache created before version 22.8 before upgrading.
|
- `do_not_evict_index_and_mark_files` - do not evict small frequently used files according to cache policy. Default: `false`. This setting was added in version 22.8. If you used filesystem cache before this version, then it will not work on versions starting from 22.8 if this setting is set to `true`. If you want to use this setting, clear old cache created before version 22.8 before upgrading.
|
||||||
|
|
||||||
- `max_file_segment_size` - a maximum size of a single cache file in bytes or in readable format (`ki, Mi, Gi, etc`, example `10Gi`). Default: `104857600` (`100Mi`).
|
- `max_file_segment_size` - a maximum size of a single cache file in bytes or in readable format (`ki, Mi, Gi, etc`, example `10Gi`). Default: `8388608` (`8Mi`).
|
||||||
|
|
||||||
- `max_elements` - a limit for a number of cache files. Default: `1048576`.
|
- `max_elements` - a limit for a number of cache files. Default: `10000000`.
|
||||||
|
|
||||||
File Cache **query/profile settings**:
|
File Cache **query/profile settings**:
|
||||||
|
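The disk-level settings above are declared inside the cache disk definition. A hedged sketch, with illustrative disk names, endpoint, and path, combining several of the documented knobs:

```xml
<clickhouse>
    <storage_configuration>
        <disks>
            <s3_disk> <!-- hypothetical underlying disk -->
                <type>s3</type>
                <endpoint>https://my-bucket.s3.amazonaws.com/clickhouse/</endpoint> <!-- placeholder -->
            </s3_disk>
            <s3_cache>
                <type>cache</type>
                <disk>s3_disk</disk>
                <path>/var/lib/clickhouse/disks/s3_cache/</path>
                <max_size>10Gi</max_size>
                <max_file_segment_size>8Mi</max_file_segment_size>
                <enable_bypass_cache_with_threshold>false</enable_bypass_cache_with_threshold>
            </s3_cache>
        </disks>
    </storage_configuration>
</clickhouse>
```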
|
||||||
|
@ -27,7 +27,7 @@ Columns:
|
|||||||
|
|
||||||
Data storing format is controlled by the `min_bytes_for_wide_part` and `min_rows_for_wide_part` settings of the [MergeTree](../../engines/table-engines/mergetree-family/mergetree.md) table.
|
Data storing format is controlled by the `min_bytes_for_wide_part` and `min_rows_for_wide_part` settings of the [MergeTree](../../engines/table-engines/mergetree-family/mergetree.md) table.
|
||||||
|
|
||||||
- `active` ([UInt8](../../sql-reference/data-types/int-uint.md)) – Flag that indicates whether the data part is active. If a data part is active, it’s used in a table. Otherwise, it’s deleted. Inactive data parts remain after merging.
|
- `active` ([UInt8](../../sql-reference/data-types/int-uint.md)) – Flag that indicates whether the data part is active. If a data part is active, it’s used in a table. Otherwise, it’s deleted. Inactive data parts remain after merging.
|
||||||
|
|
||||||
- `marks` ([UInt64](../../sql-reference/data-types/int-uint.md)) – The number of marks. To get the approximate number of rows in a data part, multiply `marks` by the index granularity (usually 8192) (this hint does not work for adaptive granularity).
|
- `marks` ([UInt64](../../sql-reference/data-types/int-uint.md)) – The number of marks. To get the approximate number of rows in a data part, multiply `marks` by the index granularity (usually 8192) (this hint does not work for adaptive granularity).
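As a quick illustration of the `marks` hint (assuming the fixed index granularity of 8192, per the caveat about adaptive granularity):

```sql
SELECT
    table,
    name,
    marks,
    marks * 8192 AS approx_rows,
    rows
FROM system.parts
WHERE active;
```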
|
||||||
|
|
||||||
|
@ -97,6 +97,10 @@ Result:
|
|||||||
|
|
||||||
If you apply this combinator, the aggregate function does not return the resulting value (such as the number of unique values for the [uniq](../../sql-reference/aggregate-functions/reference/uniq.md#agg_function-uniq) function), but an intermediate state of the aggregation (for `uniq`, this is the hash table for calculating the number of unique values). This is an `AggregateFunction(...)` that can be used for further processing or stored in a table to finish aggregating later.
|
If you apply this combinator, the aggregate function does not return the resulting value (such as the number of unique values for the [uniq](../../sql-reference/aggregate-functions/reference/uniq.md#agg_function-uniq) function), but an intermediate state of the aggregation (for `uniq`, this is the hash table for calculating the number of unique values). This is an `AggregateFunction(...)` that can be used for further processing or stored in a table to finish aggregating later.
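A short, self-contained sketch of working with such an intermediate state, using `uniq` as in the paragraph above:

```sql
-- uniqState() returns an AggregateFunction(uniq, UInt64) value instead of a number;
-- uniqMerge() combines those states and finishes the aggregation.
SELECT uniqMerge(s) AS unique_numbers
FROM
(
    SELECT uniqState(number) AS s
    FROM numbers(1000)
    GROUP BY number % 10
);
```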
|
||||||
|
|
||||||
|
:::note
|
||||||
|
Please notice, that -MapState is not an invariant for the same data due to the fact that order of data in intermediate state can change, though it doesn't impact ingestion of this data.
|
||||||
|
:::
|
||||||
|
|
||||||
To work with these states, use:
|
To work with these states, use:
|
||||||
|
|
||||||
- [AggregatingMergeTree](../../engines/table-engines/mergetree-family/aggregatingmergetree.md) table engine.
|
- [AggregatingMergeTree](../../engines/table-engines/mergetree-family/aggregatingmergetree.md) table engine.
|
||||||
|
@@ -19,8 +19,19 @@ Standard aggregate functions:
 - [stddevSamp](/docs/en/sql-reference/aggregate-functions/reference/stddevsamp.md)
 - [varPop](/docs/en/sql-reference/aggregate-functions/reference/varpop.md)
 - [varSamp](/docs/en/sql-reference/aggregate-functions/reference/varsamp.md)
+- [corr](./corr.md)
 - [covarPop](/docs/en/sql-reference/aggregate-functions/reference/covarpop.md)
 - [covarSamp](/docs/en/sql-reference/aggregate-functions/reference/covarsamp.md)
+- [entropy](./entropy.md)
+- [exponentialMovingAverage](./exponentialmovingaverage.md)
+- [intervalLengthSum](./intervalLengthSum.md)
+- [kolmogorovSmirnovTest](./kolmogorovsmirnovtest.md)
+- [mannwhitneyutest](./mannwhitneyutest.md)
+- [median](./median.md)
+- [rankCorr](./rankCorr.md)
+- [sumKahan](./sumkahan.md)
+- [studentTTest](./studentttest.md)
+- [welchTTest](./welchttest.md)
 
 ClickHouse-specific aggregate functions:
 
@@ -34,12 +45,15 @@ ClickHouse-specific aggregate functions:
 - [avgWeighted](/docs/en/sql-reference/aggregate-functions/reference/avgweighted.md)
 - [topK](/docs/en/sql-reference/aggregate-functions/reference/topk.md)
 - [topKWeighted](/docs/en/sql-reference/aggregate-functions/reference/topkweighted.md)
+- [deltaSum](./deltasum.md)
+- [deltaSumTimestamp](./deltasumtimestamp.md)
 - [groupArray](/docs/en/sql-reference/aggregate-functions/reference/grouparray.md)
 - [groupArrayLast](/docs/en/sql-reference/aggregate-functions/reference/grouparraylast.md)
 - [groupUniqArray](/docs/en/sql-reference/aggregate-functions/reference/groupuniqarray.md)
 - [groupArrayInsertAt](/docs/en/sql-reference/aggregate-functions/reference/grouparrayinsertat.md)
 - [groupArrayMovingAvg](/docs/en/sql-reference/aggregate-functions/reference/grouparraymovingavg.md)
 - [groupArrayMovingSum](/docs/en/sql-reference/aggregate-functions/reference/grouparraymovingsum.md)
+- [groupArraySample](./grouparraysample.md)
 - [groupBitAnd](/docs/en/sql-reference/aggregate-functions/reference/groupbitand.md)
 - [groupBitOr](/docs/en/sql-reference/aggregate-functions/reference/groupbitor.md)
 - [groupBitXor](/docs/en/sql-reference/aggregate-functions/reference/groupbitxor.md)
@@ -84,3 +98,9 @@ ClickHouse-specific aggregate functions:
 - [theilsU](./theilsu.md)
 - [maxIntersections](./maxintersections.md)
 - [maxIntersectionsPosition](./maxintersectionsposition.md)
+- [meanZTest](./meanztest.md)
+- [quantileGK](./quantileGK.md)
+- [quantileInterpolatedWeighted](./quantileinterpolatedweighted.md)
+- [sparkBar](./sparkbar.md)
+- [sumCount](./sumcount.md)
@@ -230,13 +230,15 @@ hasAll(set, subset)
 **Arguments**
 
 - `set` – Array of any type with a set of elements.
-- `subset` – Array of any type with elements that should be tested to be a subset of `set`.
+- `subset` – Array of any type that shares a common supertype with `set` containing elements that should be tested to be a subset of `set`.
 
 **Return values**
 
 - `1`, if `set` contains all of the elements from `subset`.
 - `0`, otherwise.
 
+Raises an exception `NO_COMMON_TYPE` if the set and subset elements do not share a common supertype.
+
 **Peculiar properties**
 
 - An empty array is a subset of any array.
@@ -253,7 +255,7 @@ hasAll(set, subset)
 
 `SELECT hasAll(['a', 'b'], ['a'])` returns 1.
 
-`SELECT hasAll([1], ['a'])` returns 0.
+`SELECT hasAll([1], ['a'])` raises a `NO_COMMON_TYPE` exception.
 
 `SELECT hasAll([[1, 2], [3, 4]], [[1, 2], [3, 5]])` returns 0.
 
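To make the new behaviour concrete, here is a small, hypothetical illustration of the common-supertype rule introduced above (not taken from the patch itself):

```sql
-- UInt8 and String have no common supertype, so this now throws NO_COMMON_TYPE
-- instead of silently returning 0:
SELECT hasAll([1], ['a']);

-- Integers and floats do share a supertype (Float64), so mixed numeric arrays still work:
SELECT hasAll([1, 2.5, 3], [2.5]);  -- returns 1
```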
@@ -268,13 +270,15 @@ hasAny(array1, array2)
 **Arguments**
 
 - `array1` – Array of any type with a set of elements.
-- `array2` – Array of any type with a set of elements.
+- `array2` – Array of any type that shares a common supertype with `array1`.
 
 **Return values**
 
 - `1`, if `array1` and `array2` have one similar element at least.
 - `0`, otherwise.
 
+Raises an exception `NO_COMMON_TYPE` if the array1 and array2 elements do not share a common supertype.
+
 **Peculiar properties**
 
 - `Null` processed as a value.
@@ -288,7 +292,7 @@ hasAny(array1, array2)
 
 `SELECT hasAny([-128, 1., 512], [1])` returns `1`.
 
-`SELECT hasAny([[1, 2], [3, 4]], ['a', 'c'])` returns `0`.
+`SELECT hasAny([[1, 2], [3, 4]], ['a', 'c'])` raises a `NO_COMMON_TYPE` exception.
 
 `SELECT hasAll([[1, 2], [3, 4]], [[1, 2], [1, 2]])` returns `1`.
 
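A hypothetical counterpart to the error case above: when the nested element types do line up, `hasAny` behaves as before.

```sql
-- Both arguments are Array(Array(UInt8)), so a common supertype exists:
SELECT hasAny([[1, 2], [3, 4]], [[5, 6], [3, 4]]);  -- returns 1
```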
@@ -318,6 +322,8 @@ For Example:
 - `1`, if `array1` contains `array2`.
 - `0`, otherwise.
 
+Raises an exception `NO_COMMON_TYPE` if the array1 and array2 elements do not share a common supertype.
+
 **Peculiar properties**
 
 - The function will return `1` if `array2` is empty.
@@ -339,6 +345,9 @@ For Example:
 `SELECT hasSubstr(['a', 'b' , 'c'], ['a', 'c'])` returns 0.
 
 `SELECT hasSubstr([[1, 2], [3, 4], [5, 6]], [[1, 2], [3, 4]])` returns 1.
+
+`SELECT hasSubstr([1, 2, NULL, 3, 4], ['a'])` raises a `NO_COMMON_TYPE` exception.
+
 
 ## indexOf(arr, x)
 
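For readers skimming the `hasSubstr` examples above: unlike `hasAll`, the needle has to appear as a contiguous run in the same order. A hypothetical illustration, not part of the patch:

```sql
SELECT hasSubstr([1, 2, 3, 4], [2, 3]);  -- returns 1: [2, 3] appears contiguously and in order
SELECT hasSubstr([1, 2, 3, 4], [3, 2]);  -- returns 0: same elements, wrong order
```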
@@ -8,7 +8,7 @@ sidebar_label: Nullable
 
 ## isNull
 
-Returns whether the argument is [NULL](../../sql-reference/syntax.md#null-literal).
+Returns whether the argument is [NULL](../../sql-reference/syntax.md#null).
 
 ``` sql
 isNull(x)
@@ -22,14 +22,15 @@ tuple(x, y, …)
 
 A function that allows getting a column from a tuple.
 
-If the second argument is a number `n`, it is the column index, starting from 1. If the second argument is a string `s`, it represents the name of the element. Besides, we can provide the third optional argument, such that when index out of bounds or element for such name does not exist, the default value returned instead of throw exception. The second and third arguments if provided are always must be constant. There is no cost to execute the function.
+If the second argument is a number `index`, it is the column index, starting from 1. If the second argument is a string `name`, it represents the name of the element. Besides, we can provide the third optional argument, such that when index out of bounds or no element exist for the name, the default value returned instead of throwing an exception. The second and third arguments, if provided, must be constants. There is no cost to execute the function.
 
-The function implements the operator `x.n` and `x.s`.
+The function implements operators `x.index` and `x.name`.
 
 **Syntax**
 
 ``` sql
-tupleElement(tuple, n/s [, default_value])
+tupleElement(tuple, index, [, default_value])
+tupleElement(tuple, name, [, default_value])
 ```
 
 ## untuple
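Stepping back to `tupleElement` above, a small hypothetical sketch of the newly documented third argument (the column aliases are made up):

```sql
SELECT
    tupleElement((1, 'a'), 2)                                        AS by_index,     -- 'a'
    tupleElement(CAST((1, 'a'), 'Tuple(id UInt8, s String)'), 'id')  AS by_name,      -- 1
    tupleElement((1, 'a'), 3, 'fallback')                            AS with_default; -- 'fallback' instead of an exception
```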
@@ -21,6 +21,9 @@ Expressions from `ON` clause and columns from `USING` clause are called “join
 ## Related Content
 
 - Blog: [ClickHouse: A Blazingly Fast DBMS with Full SQL Join Support - Part 1](https://clickhouse.com/blog/clickhouse-fully-supports-joins)
+- Blog: [ClickHouse: A Blazingly Fast DBMS with Full SQL Join Support - Under the Hood - Part 2](https://clickhouse.com/blog/clickhouse-fully-supports-joins-hash-joins-part2)
+- Blog: [ClickHouse: A Blazingly Fast DBMS with Full SQL Join Support - Under the Hood - Part 3](https://clickhouse.com/blog/clickhouse-fully-supports-joins-full-sort-partial-merge-part3)
+- Blog: [ClickHouse: A Blazingly Fast DBMS with Full SQL Join Support - Under the Hood - Part 4](https://clickhouse.com/blog/clickhouse-fully-supports-joins-direct-join-part4)
 
 ## Supported Types of JOIN
 
@@ -283,7 +283,7 @@ The optional keyword `EXTENDED` currently has no effect, it only exists for MySQ
 
 `SHOW INDEX` produces a result table with the following structure:
 - table - The name of the table (String)
-- non_unique - 0 if the index can contain duplicates, 1 otherwise (UInt8)
+- non_unique - 0 if the index cannot contain duplicates, 1 otherwise (UInt8)
 - key_name - The name of the index, `PRIMARY` if the index is a primary key index (String)
 - seq_in_index - Currently unused
 - column_name - Currently unused
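As a usage note for the `SHOW INDEX` result columns listed above, a minimal hypothetical invocation (the table name is made up):

```sql
-- Returns one row per index of the table, with the columns described above.
SHOW INDEX FROM my_table;
```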
@@ -1067,7 +1067,7 @@ ClickHouse uses threads from the global pool
 - requireTLSv1_2 - Require a TLSv1.2 connection. Acceptable values: `true`, `false`.
 - fips - Activates OpenSSL FIPS mode. Supported if the OpenSSL version the library was built with supports FIPS.
 - privateKeyPassphraseHandler - Class (a PrivateKeyPassphraseHandler subclass) that requests the passphrase for accessing the private key. For example: `<privateKeyPassphraseHandler>`, `<name>KeyFileHandler</name>`, `<options><password>test</password></options>`, `</privateKeyPassphraseHandler>`.
-- invalidCertificateHandler - Class (a CertificateHandler subclass) for verifying invalid certificates. For example: `<invalidCertificateHandler> <name>ConsoleCertificateHandler</name> </invalidCertificateHandler>`.
+- invalidCertificateHandler - Class (a CertificateHandler subclass) for verifying invalid certificates. For example: `<invalidCertificateHandler> <name>RejectCertificateHandler</name> </invalidCertificateHandler>`.
 - disableProtocols - Protocols that are not allowed to be used.
 - preferServerCiphers - Preferred server ciphers on the client.
 
@@ -66,6 +66,10 @@ WITH anySimpleState(number) AS c SELECT toTypeName(c), c FROM numbers(1);
 
 If you apply this combinator, the aggregate function returns not the finished value (for example, for the [uniq](reference/uniq.md#agg_function-uniq) function, the number of unique values) but an intermediate state of the aggregation (for `uniq`, the hash table for calculating the number of unique values), which has the type `AggregateFunction(...)` and can be used for further processing or stored in a table to finish aggregating later.
 
+:::note
+The intermediate state for -MapState is not an invariant for the same source data, because the order of the data may change. This does not, however, affect loading such data.
+:::
+
 The following are intended for working with intermediate states:
 
 - The [AggregatingMergeTree](../../engines/table-engines/mergetree-family/aggregatingmergetree.md) table engine.
@@ -466,7 +466,7 @@ SSL client/server configuration.
 - requireTLSv1_2 – Require a TLSv1.2 connection. Acceptable values: `true`, `false`.
 - fips – Activates OpenSSL FIPS mode. Supported if the library’s OpenSSL version supports FIPS.
 - privateKeyPassphraseHandler – Class (PrivateKeyPassphraseHandler subclass) that requests the passphrase for accessing the private key. For example: `<privateKeyPassphraseHandler>`, `<name>KeyFileHandler</name>`, `<options><password>test</password></options>`, `</privateKeyPassphraseHandler>`.
-- invalidCertificateHandler – Class (a subclass of CertificateHandler) for verifying invalid certificates. For example: `<invalidCertificateHandler> <name>ConsoleCertificateHandler</name> </invalidCertificateHandler>` .
+- invalidCertificateHandler – Class (a subclass of CertificateHandler) for verifying invalid certificates. For example: `<invalidCertificateHandler> <name>RejectCertificateHandler</name> </invalidCertificateHandler>` .
 - disableProtocols – Protocols that are not allowed to use.
 - preferServerCiphers – Preferred server ciphers on the client.
 
|
@ -790,7 +790,7 @@ bool Client::processWithFuzzing(const String & full_query)
|
|||||||
|
|
||||||
WriteBufferFromOStream cerr_buf(std::cerr, 4096);
|
WriteBufferFromOStream cerr_buf(std::cerr, 4096);
|
||||||
fuzz_base->dumpTree(cerr_buf);
|
fuzz_base->dumpTree(cerr_buf);
|
||||||
cerr_buf.next();
|
cerr_buf.finalize();
|
||||||
|
|
||||||
fmt::print(
|
fmt::print(
|
||||||
stderr,
|
stderr,
|
||||||
@ -928,7 +928,7 @@ bool Client::processWithFuzzing(const String & full_query)
|
|||||||
std::cout << std::endl;
|
std::cout << std::endl;
|
||||||
WriteBufferFromOStream ast_buf(std::cout, 4096);
|
WriteBufferFromOStream ast_buf(std::cout, 4096);
|
||||||
formatAST(*query, ast_buf, false /*highlight*/);
|
formatAST(*query, ast_buf, false /*highlight*/);
|
||||||
ast_buf.next();
|
ast_buf.finalize();
|
||||||
if (const auto * insert = query->as<ASTInsertQuery>())
|
if (const auto * insert = query->as<ASTInsertQuery>())
|
||||||
{
|
{
|
||||||
/// For inserts with data it's really useful to have the data itself available in the logs, as formatAST doesn't print it
|
/// For inserts with data it's really useful to have the data itself available in the logs, as formatAST doesn't print it
|
||||||
|
@ -151,6 +151,7 @@ int mainEntryClickHouseFormat(int argc, char ** argv)
|
|||||||
|
|
||||||
WriteBufferFromFileDescriptor out(STDOUT_FILENO);
|
WriteBufferFromFileDescriptor out(STDOUT_FILENO);
|
||||||
obfuscateQueries(query, out, obfuscated_words_map, used_nouns, hash_func, is_known_identifier);
|
obfuscateQueries(query, out, obfuscated_words_map, used_nouns, hash_func, is_known_identifier);
|
||||||
|
out.finalize();
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@ -175,7 +176,7 @@ int mainEntryClickHouseFormat(int argc, char ** argv)
|
|||||||
{
|
{
|
||||||
WriteBufferFromOStream res_buf(std::cout, 4096);
|
WriteBufferFromOStream res_buf(std::cout, 4096);
|
||||||
formatAST(*res, res_buf, hilite, oneline);
|
formatAST(*res, res_buf, hilite, oneline);
|
||||||
res_buf.next();
|
res_buf.finalize();
|
||||||
if (multiple)
|
if (multiple)
|
||||||
std::cout << "\n;\n";
|
std::cout << "\n;\n";
|
||||||
std::cout << std::endl;
|
std::cout << std::endl;
|
||||||
@ -199,7 +200,7 @@ int mainEntryClickHouseFormat(int argc, char ** argv)
|
|||||||
res_cout.write(*s_pos++);
|
res_cout.write(*s_pos++);
|
||||||
}
|
}
|
||||||
|
|
||||||
res_cout.next();
|
res_cout.finalize();
|
||||||
if (multiple)
|
if (multiple)
|
||||||
std::cout << " \\\n;\n";
|
std::cout << " \\\n;\n";
|
||||||
std::cout << std::endl;
|
std::cout << std::endl;
|
||||||
|
@@ -9,6 +9,7 @@
 #include <Poco/AutoPtr.h>
 #include <Poco/Logger.h>
 #include <Common/logger_useful.h>
+#include <Disks/DiskLocal.h>
 
 
 int mainEntryClickHouseKeeperConverter(int argc, char ** argv)
@@ -39,8 +40,9 @@ int mainEntryClickHouseKeeperConverter(int argc, char ** argv)
 
 try
 {
-auto keeper_context = std::make_shared<KeeperContext>();
-keeper_context->digest_enabled = true;
+auto keeper_context = std::make_shared<KeeperContext>(true);
+keeper_context->setDigestEnabled(true);
+keeper_context->setSnapshotDisk(std::make_shared<DiskLocal>("Keeper-snapshots", options["output-dir"].as<std::string>(), 0));
 
 DB::KeeperStorage storage(/* tick_time_ms */ 500, /* superdigest */ "", keeper_context, /* initialize_system_nodes */ false);
 
@@ -51,10 +53,10 @@ int mainEntryClickHouseKeeperConverter(int argc, char ** argv)
 DB::SnapshotMetadataPtr snapshot_meta = std::make_shared<DB::SnapshotMetadata>(storage.getZXID(), 1, std::make_shared<nuraft::cluster_config>());
 DB::KeeperStorageSnapshot snapshot(&storage, snapshot_meta);
 
-DB::KeeperSnapshotManager manager(options["output-dir"].as<std::string>(), 1, keeper_context);
+DB::KeeperSnapshotManager manager(1, keeper_context);
 auto snp = manager.serializeSnapshotToBuffer(snapshot);
-auto path = manager.serializeSnapshotBufferToDisk(*snp, storage.getZXID());
-std::cout << "Snapshot serialized to path:" << path << std::endl;
+auto file_info = manager.serializeSnapshotBufferToDisk(*snp, storage.getZXID());
+std::cout << "Snapshot serialized to path:" << fs::path(file_info.disk->getPath()) / file_info.path << std::endl;
 }
 catch (...)
 {
@@ -43,13 +43,15 @@ if (BUILD_STANDALONE_KEEPER)
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperDispatcher.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperLogStore.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperServer.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperContext.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperFeatureFlags.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperSnapshotManager.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperSnapshotManagerS3.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperStateMachine.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperContext.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperStateManager.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperStorage.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/KeeperAsynchronousMetrics.cpp
-${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/TinyContext.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/pathUtils.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/SessionExpiryQueue.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/SummingStateMachine.cpp
@@ -58,10 +60,14 @@ if (BUILD_STANDALONE_KEEPER)
 
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/SettingsFields.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/BaseSettings.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/ServerSettings.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/Field.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/SettingsEnums.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/ServerUUID.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/UUID.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Core/BackgroundSchedulePool.cpp
 
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/IO/ReadBuffer.cpp
+
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Server/KeeperTCPHandler.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Server/TCPServer.cpp
@@ -93,6 +99,10 @@ if (BUILD_STANDALONE_KEEPER)
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Compression/ICompressionCodec.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Compression/LZ4_decompress_faster.cpp
 
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/CurrentThread.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/NamedCollections/NamedCollections.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/NamedCollections/NamedCollectionConfiguration.cpp
+
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/ZooKeeper/IKeeper.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/ZooKeeper/TestKeeper.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/ZooKeeper/ZooKeeperCommon.cpp
@@ -103,11 +113,58 @@ if (BUILD_STANDALONE_KEEPER)
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/ZooKeeper/ZooKeeperLock.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Common/ZooKeeper/ZooKeeperNodeCache.cpp
 
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/registerDisks.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IDisk.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/DiskFactory.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/DiskSelector.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/DiskLocal.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/DiskLocalCheckThread.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/LocalDirectorySyncGuard.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/TemporaryFileOnDisk.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/loadLocalDiskConfig.cpp
+
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/IObjectStorage.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/MetadataStorageFromPlainObjectStorage.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/MetadataStorageFromDisk.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/MetadataFromDiskTransactionState.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/DiskObjectStorageMetadata.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/MetadataStorageFromDiskTransactionOperations.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/DiskObjectStorage.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/DiskObjectStorageTransaction.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/DiskObjectStorageRemoteMetadataRestoreHelper.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/DiskObjectStorageCommon.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/ObjectStorageIteratorAsync.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/ObjectStorageIterator.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/StoredObject.cpp
+
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/S3/registerDiskS3.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/S3/S3ObjectStorage.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/S3/S3Capabilities.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/S3/diskSettings.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/S3/ProxyListConfiguration.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/ObjectStorages/S3/ProxyResolverConfiguration.cpp
+
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/createReadBufferFromFileBase.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/ReadBufferFromRemoteFSGather.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/IOUringReader.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/WriteBufferFromTemporaryFile.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/WriteBufferWithFinalizeCallback.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/AsynchronousBoundedReadBuffer.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/getThreadPoolReader.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/ThreadPoolRemoteFSReader.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Disks/IO/ThreadPoolReader.cpp
+
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Storages/StorageS3Settings.cpp
+
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Daemon/BaseDaemon.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Daemon/SentryWriter.cpp
 ${CMAKE_CURRENT_SOURCE_DIR}/../../src/Daemon/GraphiteWriter.cpp
 ${CMAKE_CURRENT_BINARY_DIR}/../../src/Daemon/GitHash.generated.cpp
 
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/Standalone/Context.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/Standalone/Settings.cpp
+${CMAKE_CURRENT_SOURCE_DIR}/../../src/Coordination/Standalone/ThreadStatusExt.cpp
+
 Keeper.cpp
 clickhouse-keeper.cpp
 )
@@ -130,10 +187,6 @@ if (BUILD_STANDALONE_KEEPER)
 target_compile_definitions (clickhouse-keeper PRIVATE -DCLICKHOUSE_PROGRAM_STANDALONE_BUILD)
 target_compile_definitions (clickhouse-keeper PUBLIC -DWITHOUT_TEXT_LOG)
 
-target_include_directories(clickhouse-keeper PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/../../src") # uses includes from src directory
-target_include_directories(clickhouse-keeper PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/../../src/Core/include") # uses some includes from core
-target_include_directories(clickhouse-keeper PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/../../src") # uses some includes from common
-
 if (ENABLE_CLICKHOUSE_KEEPER_CLIENT AND TARGET ch_rust::skim)
 target_link_libraries(clickhouse-keeper PRIVATE ch_rust::skim)
 endif()
|
@ -24,6 +24,8 @@
|
|||||||
#include <sys/stat.h>
|
#include <sys/stat.h>
|
||||||
#include <pwd.h>
|
#include <pwd.h>
|
||||||
|
|
||||||
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
#include <Coordination/FourLetterCommand.h>
|
#include <Coordination/FourLetterCommand.h>
|
||||||
#include <Coordination/KeeperAsynchronousMetrics.h>
|
#include <Coordination/KeeperAsynchronousMetrics.h>
|
||||||
|
|
||||||
@ -45,6 +47,8 @@
|
|||||||
#include <Server/ProtocolServerAdapter.h>
|
#include <Server/ProtocolServerAdapter.h>
|
||||||
#include <Server/KeeperTCPHandlerFactory.h>
|
#include <Server/KeeperTCPHandlerFactory.h>
|
||||||
|
|
||||||
|
#include <Disks/registerDisks.h>
|
||||||
|
|
||||||
|
|
||||||
int mainEntryClickHouseKeeper(int argc, char ** argv)
|
int mainEntryClickHouseKeeper(int argc, char ** argv)
|
||||||
{
|
{
|
||||||
@ -201,9 +205,12 @@ void Keeper::defineOptions(Poco::Util::OptionSet & options)
|
|||||||
BaseDaemon::defineOptions(options);
|
BaseDaemon::defineOptions(options);
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Keeper::KeeperHTTPContext : public IHTTPContext
|
namespace
|
||||||
{
|
{
|
||||||
explicit KeeperHTTPContext(TinyContextPtr context_)
|
|
||||||
|
struct KeeperHTTPContext : public IHTTPContext
|
||||||
|
{
|
||||||
|
explicit KeeperHTTPContext(ContextPtr context_)
|
||||||
: context(std::move(context_))
|
: context(std::move(context_))
|
||||||
{}
|
{}
|
||||||
|
|
||||||
@ -247,12 +254,14 @@ struct Keeper::KeeperHTTPContext : public IHTTPContext
|
|||||||
return {context->getConfigRef().getInt64("keeper_server.http_send_timeout", DBMS_DEFAULT_SEND_TIMEOUT_SEC), 0};
|
return {context->getConfigRef().getInt64("keeper_server.http_send_timeout", DBMS_DEFAULT_SEND_TIMEOUT_SEC), 0};
|
||||||
}
|
}
|
||||||
|
|
||||||
TinyContextPtr context;
|
ContextPtr context;
|
||||||
};
|
};
|
||||||
|
|
||||||
HTTPContextPtr Keeper::httpContext()
|
HTTPContextPtr httpContext()
|
||||||
{
|
{
|
||||||
return std::make_shared<KeeperHTTPContext>(tiny_context);
|
return std::make_shared<KeeperHTTPContext>(Context::getGlobalContextInstance());
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
int Keeper::main(const std::vector<std::string> & /*args*/)
|
int Keeper::main(const std::vector<std::string> & /*args*/)
|
||||||
@ -316,10 +325,21 @@ try
|
|||||||
std::mutex servers_lock;
|
std::mutex servers_lock;
|
||||||
auto servers = std::make_shared<std::vector<ProtocolServerAdapter>>();
|
auto servers = std::make_shared<std::vector<ProtocolServerAdapter>>();
|
||||||
|
|
||||||
tiny_context = std::make_shared<TinyContext>();
|
auto shared_context = Context::createShared();
|
||||||
|
auto global_context = Context::createGlobal(shared_context.get());
|
||||||
|
|
||||||
|
global_context->makeGlobalContext();
|
||||||
|
global_context->setPath(path);
|
||||||
|
global_context->setRemoteHostFilter(config());
|
||||||
|
|
||||||
|
if (config().has("macros"))
|
||||||
|
global_context->setMacros(std::make_unique<Macros>(config(), "macros", log));
|
||||||
|
|
||||||
|
registerDisks(/*global_skip_access_check=*/false);
|
||||||
|
|
||||||
/// This object will periodically calculate some metrics.
|
/// This object will periodically calculate some metrics.
|
||||||
KeeperAsynchronousMetrics async_metrics(
|
KeeperAsynchronousMetrics async_metrics(
|
||||||
tiny_context,
|
global_context,
|
||||||
config().getUInt("asynchronous_metrics_update_period_s", 1),
|
config().getUInt("asynchronous_metrics_update_period_s", 1),
|
||||||
[&]() -> std::vector<ProtocolServerMetrics>
|
[&]() -> std::vector<ProtocolServerMetrics>
|
||||||
{
|
{
|
||||||
@ -344,12 +364,12 @@ try
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Initialize keeper RAFT. Do nothing if no keeper_server in config.
|
/// Initialize keeper RAFT. Do nothing if no keeper_server in config.
|
||||||
tiny_context->initializeKeeperDispatcher(/* start_async = */ true);
|
global_context->initializeKeeperDispatcher(/* start_async = */ true);
|
||||||
FourLetterCommandFactory::registerCommands(*tiny_context->getKeeperDispatcher());
|
FourLetterCommandFactory::registerCommands(*global_context->getKeeperDispatcher());
|
||||||
|
|
||||||
auto config_getter = [this] () -> const Poco::Util::AbstractConfiguration &
|
auto config_getter = [&] () -> const Poco::Util::AbstractConfiguration &
|
||||||
{
|
{
|
||||||
return tiny_context->getConfigRef();
|
return global_context->getConfigRef();
|
||||||
};
|
};
|
||||||
|
|
||||||
auto tcp_receive_timeout = config().getInt64("keeper_server.socket_receive_timeout_sec", DBMS_DEFAULT_RECEIVE_TIMEOUT_SEC);
|
auto tcp_receive_timeout = config().getInt64("keeper_server.socket_receive_timeout_sec", DBMS_DEFAULT_RECEIVE_TIMEOUT_SEC);
|
||||||
@ -371,7 +391,7 @@ try
|
|||||||
"Keeper (tcp): " + address.toString(),
|
"Keeper (tcp): " + address.toString(),
|
||||||
std::make_unique<TCPServer>(
|
std::make_unique<TCPServer>(
|
||||||
new KeeperTCPHandlerFactory(
|
new KeeperTCPHandlerFactory(
|
||||||
config_getter, tiny_context->getKeeperDispatcher(),
|
config_getter, global_context->getKeeperDispatcher(),
|
||||||
tcp_receive_timeout, tcp_send_timeout, false), server_pool, socket));
|
tcp_receive_timeout, tcp_send_timeout, false), server_pool, socket));
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -389,7 +409,7 @@ try
|
|||||||
"Keeper with secure protocol (tcp_secure): " + address.toString(),
|
"Keeper with secure protocol (tcp_secure): " + address.toString(),
|
||||||
std::make_unique<TCPServer>(
|
std::make_unique<TCPServer>(
|
||||||
new KeeperTCPHandlerFactory(
|
new KeeperTCPHandlerFactory(
|
||||||
config_getter, tiny_context->getKeeperDispatcher(),
|
config_getter, global_context->getKeeperDispatcher(),
|
||||||
tcp_receive_timeout, tcp_send_timeout, true), server_pool, socket));
|
tcp_receive_timeout, tcp_send_timeout, true), server_pool, socket));
|
||||||
#else
|
#else
|
||||||
UNUSED(port);
|
UNUSED(port);
|
||||||
@ -441,7 +461,7 @@ try
|
|||||||
[&](ConfigurationPtr config, bool /* initial_loading */)
|
[&](ConfigurationPtr config, bool /* initial_loading */)
|
||||||
{
|
{
|
||||||
if (config->has("keeper_server"))
|
if (config->has("keeper_server"))
|
||||||
tiny_context->updateKeeperConfiguration(*config);
|
global_context->updateKeeperConfiguration(*config);
|
||||||
},
|
},
|
||||||
/* already_loaded = */ false); /// Reload it right now (initial loading)
|
/* already_loaded = */ false); /// Reload it right now (initial loading)
|
||||||
|
|
||||||
@ -472,7 +492,7 @@ try
|
|||||||
else
|
else
|
||||||
LOG_INFO(log, "Closed connections to Keeper.");
|
LOG_INFO(log, "Closed connections to Keeper.");
|
||||||
|
|
||||||
tiny_context->shutdownKeeperDispatcher();
|
global_context->shutdownKeeperDispatcher();
|
||||||
|
|
||||||
/// Wait server pool to avoid use-after-free of destroyed context in the handlers
|
/// Wait server pool to avoid use-after-free of destroyed context in the handlers
|
||||||
server_pool.joinAll();
|
server_pool.joinAll();
|
||||||
|
@@ -1,9 +1,7 @@
 #pragma once
 
 #include <Server/IServer.h>
-#include <Server/HTTP/HTTPContext.h>
 #include <Daemon/BaseDaemon.h>
-#include <Coordination/TinyContext.h>
 
 namespace Poco
 {
@@ -68,11 +66,6 @@ protected:
 std::string getDefaultConfigFileName() const override;
 
 private:
-TinyContextPtr tiny_context;
-
-struct KeeperHTTPContext;
-HTTPContextPtr httpContext();
-
 Poco::Net::SocketAddress socketBindListen(Poco::Net::ServerSocket & socket, const std::string & host, UInt16 port, [[maybe_unused]] bool secure = false) const;
 
 using CreateServerFunc = std::function<void(UInt16)>;
@@ -282,6 +282,13 @@
 <cacheSessions>true</cacheSessions>
 <disableProtocols>sslv2,sslv3</disableProtocols>
 <preferServerCiphers>true</preferServerCiphers>
+
+<invalidCertificateHandler>
+<!-- The server, in contrast to the client, cannot ask about the certificate interactively.
+The only reasonable option is to reject.
+-->
+<name>RejectCertificateHandler</name>
+</invalidCertificateHandler>
 </server>
 
 <client> <!-- Used for connecting to https dictionary source and secured Zookeeper communication -->
360 rust/skim/Cargo.lock generated
@@ -14,13 +14,19 @@ dependencies = [
 
 [[package]]
 name = "aho-corasick"
-version = "0.7.20"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
+checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
 dependencies = [
 "memchr",
 ]
 
+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
 [[package]]
 name = "android_system_properties"
 version = "0.1.5"
@@ -32,9 +38,9 @@ dependencies = [
 
 [[package]]
 name = "arrayvec"
-version = "0.7.2"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
+checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
 
 [[package]]
 name = "atty"
@@ -42,7 +48,7 @@ version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
 dependencies = [
-"hermit-abi",
+"hermit-abi 0.1.19",
 "libc",
 "winapi",
 ]
@@ -67,15 +73,15 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bumpalo"
-version = "3.11.1"
+version = "3.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
+checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
 
 [[package]]
 name = "cc"
-version = "1.0.77"
+version = "1.0.79"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
+checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
 
 [[package]]
 name = "cfg-if"
@@ -85,13 +91,13 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chrono"
-version = "0.4.23"
+version = "0.4.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f"
+checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5"
 dependencies = [
+"android-tzdata",
 "iana-time-zone",
 "js-sys",
-"num-integer",
 "num-traits",
 "time 0.1.45",
 "wasm-bindgen",
@@ -100,9 +106,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "3.2.23"
+version = "3.2.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
+checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
 dependencies = [
 "atty",
 "bitflags",
@@ -135,9 +141,9 @@ dependencies = [
 
 [[package]]
 name = "core-foundation-sys"
-version = "0.8.3"
+version = "0.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
+checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
 
 [[package]]
 name = "crossbeam"
@@ -155,9 +161,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.5.6"
+version = "0.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
+checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
 dependencies = [
 "cfg-if",
 "crossbeam-utils",
@@ -165,9 +171,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-deque"
-version = "0.8.2"
+version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
+checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef"
 dependencies = [
 "cfg-if",
 "crossbeam-epoch",
@@ -176,14 +182,14 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-epoch"
-version = "0.9.13"
+version = "0.9.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
+checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
 dependencies = [
 "autocfg",
 "cfg-if",
 "crossbeam-utils",
-"memoffset 0.7.1",
+"memoffset 0.9.0",
 "scopeguard",
 ]
 
@@ -199,18 +205,18 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.14"
+version = "0.8.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
+checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
 dependencies = [
 "cfg-if",
 ]
 
 [[package]]
 name = "cxx"
-version = "1.0.83"
+version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bdf07d07d6531bfcdbe9b8b739b104610c6508dcc4d63b410585faf338241daf"
+checksum = "e88abab2f5abbe4c56e8f1fb431b784d710b709888f35755a160e62e33fe38e8"
 dependencies = [
 "cc",
 "cxxbridge-flags",
@@ -220,9 +226,9 @@ dependencies = [
 
 [[package]]
 name = "cxx-build"
-version = "1.0.83"
+version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2eb5b96ecdc99f72657332953d4d9c50135af1bac34277801cc3937906ebd39"
+checksum = "5c0c11acd0e63bae27dcd2afced407063312771212b7a823b4fd72d633be30fb"
 dependencies = [
 "cc",
 "codespan-reporting",
@@ -230,31 +236,31 @@ dependencies = [
 "proc-macro2",
 "quote",
 "scratch",
-"syn",
+"syn 2.0.23",
 ]
 
 [[package]]
 name = "cxxbridge-flags"
-version = "1.0.83"
+version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac040a39517fd1674e0f32177648334b0f4074625b5588a64519804ba0553b12"
+checksum = "8d3816ed957c008ccd4728485511e3d9aaf7db419aa321e3d2c5a2f3411e36c8"
 
 [[package]]
 name = "cxxbridge-macro"
-version = "1.0.83"
+version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1362b0ddcfc4eb0a1f57b68bd77dd99f0e826958a96abd0ae9bd092e114ffed6"
+checksum = "a26acccf6f445af85ea056362561a24ef56cdc15fcc685f03aec50b9c702cb6d"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn",
+"syn 2.0.23",
 ]
 
 [[package]]
 name = "darling"
-version = "0.14.2"
+version = "0.14.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0dd3cd20dc6b5a876612a6e5accfe7f3dd883db6d07acfbf14c128f61550dfa"
+checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850"
 dependencies = [
 "darling_core",
 "darling_macro",
@@ -262,27 +268,27 @@ dependencies = [
 
 [[package]]
 name = "darling_core"
-version = "0.14.2"
+version = "0.14.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f"
+checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0"
 dependencies = [
 "fnv",
 "ident_case",
 "proc-macro2",
 "quote",
 "strsim",
-"syn",
+"syn 1.0.109",
 ]
 
 [[package]]
 name = "darling_macro"
-version = "0.14.2"
+version = "0.14.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e"
+checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e"
 dependencies = [
 "darling_core",
 "quote",
-"syn",
+"syn 1.0.109",
 ]
 
 [[package]]
@@ -313,7 +319,7 @@ dependencies = [
 "darling",
 "proc-macro2",
 "quote",
-"syn",
+"syn 1.0.109",
 ]
 
 [[package]]
@@ -323,7 +329,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f0314b72bed045f3a68671b3c86328386762c93f82d98c65c3cb5e5f573dd68"
 dependencies = [
 "derive_builder_core",
-"syn",
+"syn 1.0.109",
 ]
 
 [[package]]
@@ -349,9 +355,9 @@ dependencies = [
 
 [[package]]
 name = "either"
-version = "1.8.0"
+version = "1.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
+checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
 
 [[package]]
 name = "env_logger"
@@ -383,9 +389,9 @@ dependencies = [
 
 [[package]]
 name = "getrandom"
-version = "0.2.8"
+version = "0.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
+checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
 dependencies = [
 "cfg-if",
 "libc",
@@ -407,6 +413,12 @@ dependencies = [
 "libc",
 ]
 
+[[package]]
+name = "hermit-abi"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
+
 [[package]]
 name = "humantime"
 version = "2.1.0"
@@ -415,26 +427,25 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
 
 [[package]]
 name = "iana-time-zone"
-version = "0.1.53"
+version = "0.1.57"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765"
+checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613"
 dependencies = [
 "android_system_properties",
 "core-foundation-sys",
 "iana-time-zone-haiku",
 "js-sys",
 "wasm-bindgen",
-"winapi",
+"windows",
 ]
 
 [[package]]
 name = "iana-time-zone-haiku"
-version = "0.1.1"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca"
+checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
 dependencies = [
-"cxx",
-"cxx-build",
+"cc",
 ]
 
 [[package]]
@@ -445,9 +456,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
 
 [[package]]
 name = "indexmap"
-version = "1.9.2"
+version = "1.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
+checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
 dependencies = [
 "autocfg",
 "hashbrown",
@@ -455,9 +466,9 @@ dependencies = [
 
 [[package]]
 name = "js-sys"
-version = "0.3.60"
+version = "0.3.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47"
+checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a"
 dependencies = [
 "wasm-bindgen",
 ]
@@ -470,27 +481,24 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.138"
+version = "0.2.147"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8"
+checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
 
 [[package]]
 name = "link-cplusplus"
-version = "1.0.7"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9272ab7b96c9046fbc5bc56c06c117cb639fe2d509df0c421cad82d2915cf369"
+checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5"
 dependencies = [
 "cc",
 ]
 
 [[package]]
 name = "log"
-version = "0.4.17"
+version = "0.4.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
-dependencies = [
-"cfg-if",
-]
+checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
 
 [[package]]
 name = "memchr"
@@ -509,9 +517,9 @@ dependencies = [
 
 [[package]]
 name = "memoffset"
-version = "0.7.1"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
+checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
 dependencies = [
 "autocfg",
 ]
@@ -541,16 +549,6 @@ dependencies = [
 "pin-utils",
 ]
 
-[[package]]
-name = "num-integer"
-version = "0.1.45"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
-dependencies = [
-"autocfg",
|
|
||||||
"num-traits",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "num-traits"
|
name = "num-traits"
|
||||||
version = "0.2.15"
|
version = "0.2.15"
|
||||||
@ -562,25 +560,25 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "num_cpus"
|
name = "num_cpus"
|
||||||
version = "1.14.0"
|
version = "1.16.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
|
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"hermit-abi",
|
"hermit-abi 0.3.1",
|
||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "once_cell"
|
name = "once_cell"
|
||||||
version = "1.16.0"
|
version = "1.18.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"
|
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "os_str_bytes"
|
name = "os_str_bytes"
|
||||||
version = "6.4.1"
|
version = "6.5.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee"
|
checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pin-utils"
|
name = "pin-utils"
|
||||||
@ -590,27 +588,27 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.47"
|
version = "1.0.63"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
|
checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quote"
|
name = "quote"
|
||||||
version = "1.0.21"
|
version = "1.0.29"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
|
checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rayon"
|
name = "rayon"
|
||||||
version = "1.6.1"
|
version = "1.7.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
|
checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"either",
|
"either",
|
||||||
"rayon-core",
|
"rayon-core",
|
||||||
@ -618,9 +616,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rayon-core"
|
name = "rayon-core"
|
||||||
version = "1.10.1"
|
version = "1.11.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3"
|
checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"crossbeam-channel",
|
"crossbeam-channel",
|
||||||
"crossbeam-deque",
|
"crossbeam-deque",
|
||||||
@ -650,9 +648,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "1.7.0"
|
version = "1.8.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
|
checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
@ -661,15 +659,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.6.28"
|
version = "0.7.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
|
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustversion"
|
name = "rustversion"
|
||||||
version = "1.0.9"
|
version = "1.0.12"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8"
|
checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "scopeguard"
|
name = "scopeguard"
|
||||||
@ -679,15 +677,15 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "scratch"
|
name = "scratch"
|
||||||
version = "1.0.2"
|
version = "1.0.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898"
|
checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde"
|
name = "serde"
|
||||||
version = "1.0.149"
|
version = "1.0.164"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "256b9932320c590e707b94576e3cc1f7c9024d0ee6612dfbcf1cb106cbe8e055"
|
checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "shlex"
|
name = "shlex"
|
||||||
@ -697,9 +695,9 @@ checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "skim"
|
name = "skim"
|
||||||
version = "0.10.2"
|
version = "0.10.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "cebed5f897cd6c0d80fbe30adb36c0abf7400e93043a63ae56458495642b3485"
|
checksum = "e5d28de0a6cb2cdd83a076f1de9d965b973ae08b244df1aa70b432946dda0f32"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"atty",
|
"atty",
|
||||||
"beef",
|
"beef",
|
||||||
@ -717,7 +715,7 @@ dependencies = [
|
|||||||
"rayon",
|
"rayon",
|
||||||
"regex",
|
"regex",
|
||||||
"shlex",
|
"shlex",
|
||||||
"time 0.3.17",
|
"time 0.3.22",
|
||||||
"timer",
|
"timer",
|
||||||
"tuikit",
|
"tuikit",
|
||||||
"unicode-width",
|
"unicode-width",
|
||||||
@ -732,9 +730,20 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "1.0.105"
|
version = "1.0.109"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "60b9b43d45702de4c839cb9b51d9f529c5dd26a4aff255b42b1ebc03e88ee908"
|
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"unicode-ident",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "syn"
|
||||||
|
version = "2.0.23"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@ -754,9 +763,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "termcolor"
|
name = "termcolor"
|
||||||
version = "1.1.3"
|
version = "1.2.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
|
checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"winapi-util",
|
"winapi-util",
|
||||||
]
|
]
|
||||||
@ -769,30 +778,31 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thiserror"
|
name = "thiserror"
|
||||||
version = "1.0.37"
|
version = "1.0.40"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e"
|
checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"thiserror-impl",
|
"thiserror-impl",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thiserror-impl"
|
name = "thiserror-impl"
|
||||||
version = "1.0.37"
|
version = "1.0.40"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
|
checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn",
|
"syn 2.0.23",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thread_local"
|
name = "thread_local"
|
||||||
version = "1.1.4"
|
version = "1.1.7"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
|
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -809,9 +819,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "time"
|
name = "time"
|
||||||
version = "0.3.17"
|
version = "0.3.22"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376"
|
checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
"time-core",
|
"time-core",
|
||||||
@ -819,9 +829,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "time-core"
|
name = "time-core"
|
||||||
version = "0.1.0"
|
version = "0.1.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
|
checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "timer"
|
name = "timer"
|
||||||
@ -848,9 +858,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-ident"
|
name = "unicode-ident"
|
||||||
version = "1.0.5"
|
version = "1.0.9"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
|
checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-width"
|
name = "unicode-width"
|
||||||
@ -860,15 +870,15 @@ checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "utf8parse"
|
name = "utf8parse"
|
||||||
version = "0.2.0"
|
version = "0.2.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "936e4b492acfd135421d8dca4b1aa80a7bfc26e702ef3af710e0752684df5372"
|
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "vte"
|
name = "vte"
|
||||||
version = "0.11.0"
|
version = "0.11.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1aae21c12ad2ec2d168c236f369c38ff332bc1134f7246350dca641437365045"
|
checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arrayvec",
|
"arrayvec",
|
||||||
"utf8parse",
|
"utf8parse",
|
||||||
@ -899,9 +909,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
version = "0.2.83"
|
version = "0.2.87"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268"
|
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"wasm-bindgen-macro",
|
"wasm-bindgen-macro",
|
||||||
@ -909,24 +919,24 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-backend"
|
name = "wasm-bindgen-backend"
|
||||||
version = "0.2.83"
|
version = "0.2.87"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142"
|
checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bumpalo",
|
"bumpalo",
|
||||||
"log",
|
"log",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn",
|
"syn 2.0.23",
|
||||||
"wasm-bindgen-shared",
|
"wasm-bindgen-shared",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro"
|
name = "wasm-bindgen-macro"
|
||||||
version = "0.2.83"
|
version = "0.2.87"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
|
checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"quote",
|
"quote",
|
||||||
"wasm-bindgen-macro-support",
|
"wasm-bindgen-macro-support",
|
||||||
@ -934,22 +944,22 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro-support"
|
name = "wasm-bindgen-macro-support"
|
||||||
version = "0.2.83"
|
version = "0.2.87"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
|
checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn",
|
"syn 2.0.23",
|
||||||
"wasm-bindgen-backend",
|
"wasm-bindgen-backend",
|
||||||
"wasm-bindgen-shared",
|
"wasm-bindgen-shared",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-shared"
|
name = "wasm-bindgen-shared"
|
||||||
version = "0.2.83"
|
version = "0.2.87"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f"
|
checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "winapi"
|
name = "winapi"
|
||||||
@ -981,3 +991,69 @@ name = "winapi-x86_64-pc-windows-gnu"
|
|||||||
version = "0.4.0"
|
version = "0.4.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f"
|
||||||
|
dependencies = [
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows-targets"
|
||||||
|
version = "0.48.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
|
||||||
|
dependencies = [
|
||||||
|
"windows_aarch64_gnullvm",
|
||||||
|
"windows_aarch64_msvc",
|
||||||
|
"windows_i686_gnu",
|
||||||
|
"windows_i686_msvc",
|
||||||
|
"windows_x86_64_gnu",
|
||||||
|
"windows_x86_64_gnullvm",
|
||||||
|
"windows_x86_64_msvc",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_gnullvm"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_aarch64_msvc"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_gnu"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_i686_msvc"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnu"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_gnullvm"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "windows_x86_64_msvc"
|
||||||
|
version = "0.48.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
|
||||||
@@ -228,6 +228,12 @@ ContextAccess::ContextAccess(const AccessControl & access_control_, const Params
 }
 
+
+ContextAccess::ContextAccess(FullAccess)
+    : is_full_access(true), access(std::make_shared<AccessRights>(AccessRights::getFullAccess())), access_with_implicit(access)
+{
+}
+
 
 ContextAccess::~ContextAccess()
 {
     enabled_settings.reset();
@@ -413,14 +419,8 @@ std::optional<QuotaUsage> ContextAccess::getQuotaUsage() const
 
 std::shared_ptr<const ContextAccess> ContextAccess::getFullAccess()
 {
-    static const std::shared_ptr<const ContextAccess> res = []
-    {
-        auto full_access = std::make_shared<ContextAccess>();
-        full_access->is_full_access = true;
-        full_access->access = std::make_shared<AccessRights>(AccessRights::getFullAccess());
-        full_access->access_with_implicit = full_access->access;
-        return full_access;
-    }();
+    static const std::shared_ptr<const ContextAccess> res =
+        [] { return std::shared_ptr<ContextAccess>(new ContextAccess{kFullAccess}); }();
     return res;
 }
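Note on the getFullAccess() hunk above: the old code default-constructed a ContextAccess and then patched its fields inside the initializing lambda, while the new code moves that work into a dedicated constructor selected by a private tag type (FullAccess / kFullAccess), which is what allows is_full_access to become const in the header change that follows. Below is a minimal self-contained sketch of the same idiom; Widget, isFullAccess and the other names are illustrative stand-ins rather than the real ClickHouse classes, and only standard C++17 is assumed.

#include <memory>

class Widget
{
public:
    static std::shared_ptr<const Widget> getFullAccess()
    {
        // Thread-safe since C++11: the function-local static is initialized exactly once.
        // The lambda may call the private constructor because it is written inside a
        // member function of Widget.
        static const std::shared_ptr<const Widget> res =
            [] { return std::shared_ptr<Widget>(new Widget{kFullAccess}); }();
        return res;
    }

    bool isFullAccess() const { return is_full_access; }

private:
    struct FullAccess {};                      // tag type, never visible to callers
    static constexpr FullAccess kFullAccess{};

    explicit Widget(FullAccess) : is_full_access(true) {}

    const bool is_full_access = false;         // const: decided once, in the constructor
};

int main()
{
    auto a = Widget::getFullAccess();
    auto b = Widget::getFullAccess();
    return (a == b && a->isFullAccess()) ? 0 : 1;   // same instance both times
}

Because construction happens in exactly one place, the "full access" flag no longer needs to be mutated after the fact, and concurrent first calls to getFullAccess() still produce a single shared instance.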
@@ -69,7 +69,6 @@ public:
     using Params = ContextAccessParams;
     const Params & getParams() const { return params; }
 
-    ContextAccess() { } /// NOLINT
     ContextAccess(const AccessControl & access_control_, const Params & params_);
 
     /// Returns the current user. Throws if user is nullptr.
@@ -171,10 +170,17 @@ public:
 private:
     friend class AccessControl;
 
+    struct FullAccess {};
+    static const FullAccess kFullAccess;
+
+    /// Makes an instance of ContextAccess which provides full access to everything
+    /// without any limitations. This is used for the global context.
+    explicit ContextAccess(FullAccess);
+
     void initialize();
-    void setUser(const UserPtr & user_) const;
-    void setRolesInfo(const std::shared_ptr<const EnabledRolesInfo> & roles_info_) const;
-    void calculateAccessRights() const;
+    void setUser(const UserPtr & user_) const TSA_REQUIRES(mutex);
+    void setRolesInfo(const std::shared_ptr<const EnabledRolesInfo> & roles_info_) const TSA_REQUIRES(mutex);
+    void calculateAccessRights() const TSA_REQUIRES(mutex);
 
     template <bool throw_if_denied, bool grant_option>
     bool checkAccessImpl(const AccessFlags & flags) const;
@@ -217,20 +223,23 @@ private:
 
     const AccessControl * access_control = nullptr;
     const Params params;
-    bool is_full_access = false;
-    mutable Poco::Logger * trace_log = nullptr;
-    mutable UserPtr user;
-    mutable String user_name;
-    mutable bool user_was_dropped = false;
-    mutable scope_guard subscription_for_user_change;
-    mutable std::shared_ptr<const EnabledRoles> enabled_roles;
-    mutable scope_guard subscription_for_roles_changes;
-    mutable std::shared_ptr<const EnabledRolesInfo> roles_info;
-    mutable std::shared_ptr<const AccessRights> access;
-    mutable std::shared_ptr<const AccessRights> access_with_implicit;
-    mutable std::shared_ptr<const EnabledRowPolicies> enabled_row_policies;
-    mutable std::shared_ptr<const EnabledQuota> enabled_quota;
-    mutable std::shared_ptr<const EnabledSettings> enabled_settings;
+    const bool is_full_access = false;
+
+    mutable std::atomic<bool> user_was_dropped = false;
+    mutable std::atomic<Poco::Logger *> trace_log = nullptr;
+
+    mutable UserPtr user TSA_GUARDED_BY(mutex);
+    mutable String user_name TSA_GUARDED_BY(mutex);
+    mutable scope_guard subscription_for_user_change TSA_GUARDED_BY(mutex);
+    mutable std::shared_ptr<const EnabledRoles> enabled_roles TSA_GUARDED_BY(mutex);
+    mutable scope_guard subscription_for_roles_changes TSA_GUARDED_BY(mutex);
+    mutable std::shared_ptr<const EnabledRolesInfo> roles_info TSA_GUARDED_BY(mutex);
+    mutable std::shared_ptr<const AccessRights> access TSA_GUARDED_BY(mutex);
+    mutable std::shared_ptr<const AccessRights> access_with_implicit TSA_GUARDED_BY(mutex);
+    mutable std::shared_ptr<const EnabledRowPolicies> enabled_row_policies TSA_GUARDED_BY(mutex);
+    mutable std::shared_ptr<const EnabledQuota> enabled_quota TSA_GUARDED_BY(mutex);
+    mutable std::shared_ptr<const EnabledSettings> enabled_settings TSA_GUARDED_BY(mutex);
 
     mutable std::mutex mutex;
 };
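The TSA_REQUIRES and TSA_GUARDED_BY annotations added in this header are Clang thread-safety-analysis markers: a member tagged with the mutex may only be touched while that mutex is held, and a method tagged as requiring the mutex must be entered with the lock already taken, so misuse surfaces as a compile-time warning rather than a data race. Below is a small sketch of the underlying Clang attributes; the macro and type names here are placeholders (the real TSA_* macros are assumed to wrap these attributes), and the analysis assumes clang with -Wthread-safety.

#include <mutex>

// Illustrative macros standing in for the TSA_* family used above.
#define CAPABILITY(x)   __attribute__((capability(x)))
#define GUARDED_BY(x)   __attribute__((guarded_by(x)))
#define REQUIRES(...)   __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...)    __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...)    __attribute__((release_capability(__VA_ARGS__)))

// A lock type annotated as a capability, so the analysis can track it.
class CAPABILITY("mutex") Mutex
{
public:
    void lock() ACQUIRE();
    void unlock() RELEASE();
private:
    std::mutex impl;
};

void Mutex::lock() { impl.lock(); }
void Mutex::unlock() { impl.unlock(); }

class Counter
{
public:
    void increment()
    {
        mutex.lock();
        incrementLocked();   // OK: the capability is held at this point.
        mutex.unlock();
    }
private:
    Mutex mutex;
    int value GUARDED_BY(mutex) = 0;   // like the TSA_GUARDED_BY members above

    // Like calculateAccessRights() const TSA_REQUIRES(mutex): callers must hold the lock.
    void incrementLocked() REQUIRES(mutex);
};

void Counter::incrementLocked() { ++value; }

int main()
{
    Counter c;
    c.increment();
}

With -Wthread-safety enabled, removing the mutex.lock() call in increment() makes clang warn that incrementLocked() requires holding 'mutex', which is exactly the class of mistake these annotations are meant to catch for the guarded members listed above.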
@@ -25,6 +25,7 @@ IAggregateFunction * createWithNumericOrTimeType(const IDataType & argument_type
     WhichDataType which(argument_type);
     if (which.idx == TypeIndex::Date) return new AggregateFunctionTemplate<UInt16, Data>(std::forward<TArgs>(args)...);
     if (which.idx == TypeIndex::DateTime) return new AggregateFunctionTemplate<UInt32, Data>(std::forward<TArgs>(args)...);
+    if (which.idx == TypeIndex::IPv4) return new AggregateFunctionTemplate<IPv4, Data>(std::forward<TArgs>(args)...);
     return createWithNumericType<AggregateFunctionTemplate, Data, TArgs...>(argument_type, std::forward<TArgs>(args)...);
 }
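The one-line addition above extends a runtime-to-compile-time dispatch helper: the column's TypeIndex, known only at runtime, selects which template instantiation of the aggregate function is constructed, so IPv4 columns now take the same fixed-width code path as Date and DateTime. A stripped-down sketch of that dispatch shape follows; the names are hypothetical, and IPv4 is assumed here to be representable as a 32-bit value.

#include <cstdint>
#include <memory>

enum class TypeIndex { Date, DateTime, IPv4, Other };

struct IAggregateFunction { virtual ~IAggregateFunction() = default; };

// Stand-in for AggregateFunctionTemplate<T, Data>: T fixes how the column's
// values are represented inside the aggregation state.
template <typename T>
struct UniqCounter : IAggregateFunction {};

using IPv4 = uint32_t;   // assumption: an IPv4 value fits in 32 bits

std::unique_ptr<IAggregateFunction> createForType(TypeIndex idx)
{
    if (idx == TypeIndex::Date)     return std::make_unique<UniqCounter<uint16_t>>();
    if (idx == TypeIndex::DateTime) return std::make_unique<UniqCounter<uint32_t>>();
    if (idx == TypeIndex::IPv4)     return std::make_unique<UniqCounter<IPv4>>();   // the newly added branch
    return nullptr;   // the real helper falls back to a generic numeric dispatcher here
}

int main()
{
    auto f = createForType(TypeIndex::IPv4);
    return f ? 0 : 1;
}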
Some files were not shown because too many files have changed in this diff.