Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-12-15 10:52:30 +00:00)

Commit d309e8793a: Merge branch 'master' into ch_canh_fix_prefix_not_like

.github/workflows/nightly.yml (vendored), 52 changed lines
@@ -122,3 +122,55 @@ jobs:
          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
+  SonarCloud:
+    runs-on: [self-hosted, builder]
+    env:
+      SONAR_SCANNER_VERSION: 4.7.0.2747
+      SONAR_SERVER_URL: "https://sonarcloud.io"
+      BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
+          submodules: true
+      - name: Set up JDK 11
+        uses: actions/setup-java@v1
+        with:
+          java-version: 11
+      - name: Download and set up sonar-scanner
+        env:
+          SONAR_SCANNER_DOWNLOAD_URL: https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${{ env.SONAR_SCANNER_VERSION }}-linux.zip
+        run: |
+          mkdir -p "$HOME/.sonar"
+          curl -sSLo "$HOME/.sonar/sonar-scanner.zip" "${{ env.SONAR_SCANNER_DOWNLOAD_URL }}"
+          unzip -o "$HOME/.sonar/sonar-scanner.zip" -d "$HOME/.sonar/"
+          echo "$HOME/.sonar/sonar-scanner-${{ env.SONAR_SCANNER_VERSION }}-linux/bin" >> "$GITHUB_PATH"
+      - name: Download and set up build-wrapper
+        env:
+          BUILD_WRAPPER_DOWNLOAD_URL: ${{ env.SONAR_SERVER_URL }}/static/cpp/build-wrapper-linux-x86.zip
+        run: |
+          curl -sSLo "$HOME/.sonar/build-wrapper-linux-x86.zip" "${{ env.BUILD_WRAPPER_DOWNLOAD_URL }}"
+          unzip -o "$HOME/.sonar/build-wrapper-linux-x86.zip" -d "$HOME/.sonar/"
+          echo "$HOME/.sonar/build-wrapper-linux-x86" >> "$GITHUB_PATH"
+      - name: Set Up Build Tools
+        run: |
+          sudo apt-get update
+          sudo apt-get install -yq git cmake ccache python3 ninja-build
+          sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
+      - name: Run build-wrapper
+        run: |
+          mkdir build
+          cd build
+          cmake ..
+          cd ..
+          build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/
+      - name: Run sonar-scanner
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+        run: |
+          sonar-scanner \
+            --define sonar.host.url="${{ env.SONAR_SERVER_URL }}" \
+            --define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" \
+            --define sonar.projectKey="ClickHouse_ClickHouse" \
+            --define sonar.organization="clickhouse-java"
@@ -5,7 +5,7 @@ ClickHouse® is an open-source column-oriented database management system that a

## Useful Links

* [Official website](https://clickhouse.com/) has a quick high-level overview of ClickHouse on the main page.
-* [ClickHouse Cloud](https://clickhouse.com/cloud) ClickHouse as a service, built by the creators and maintainers.
+* [ClickHouse Cloud](https://clickhouse.cloud) ClickHouse as a service, built by the creators and maintainers.
* [Tutorial](https://clickhouse.com/docs/en/getting_started/tutorial/) shows how to set up and query a small ClickHouse cluster.
* [Documentation](https://clickhouse.com/docs/en/) provides more in-depth information.
* [YouTube channel](https://www.youtube.com/c/ClickHouseDB) has a lot of content about ClickHouse in video format.
@@ -16,5 +16,6 @@ ClickHouse® is an open-source column-oriented database management system that a
* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.

## Upcoming events
-* [**v22.10 Release Webinar**](https://clickhouse.com/company/events/v22-10-release-webinar) Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.
+* [**v22.11 Release Webinar**](https://clickhouse.com/company/events/v22-11-release-webinar) Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.
-* [**Introducing ClickHouse Cloud**](https://clickhouse.com/company/events/cloud-beta) Introducing ClickHouse as a service, built by creators and maintainers of the fastest OLAP database on earth. Join Tanya Bragin for a detailed walkthrough of ClickHouse Cloud capabilities, as well as a peek behind the curtain to understand the unique architecture that makes our service tick.
+* [**ClickHouse Meetup at the Deutsche Bank office in Berlin**](https://www.meetup.com/clickhouse-berlin-user-group/events/289311596/) Hear from Deutsche Bank on why they chose ClickHouse for big sensitive data in a regulated environment. The ClickHouse team will then present how ClickHouse is used for real time financial data analytics, including tick data, trade analytics and risk management.
+* [**AWS re:Invent**](https://clickhouse.com/company/events/aws-reinvent) Core members of the ClickHouse team -- including 2 of our founders -- will be at re:Invent from November 29 to December 3. We are available on the show floor, but are also determining interest in holding an event during the time there.
docs/changelogs/v22.3.14.18-lts.md (new file, 26 lines)
@@ -0,0 +1,26 @@
+---
+sidebar_position: 1
+sidebar_label: 2022
+---
+
+# 2022 Changelog
+
+### ClickHouse release v22.3.14.18-lts (642946f61b2) FIXME as compared to v22.3.13.80-lts (e2708b01fba)
+
+#### Bug Fix
+* Backported in [#42432](https://github.com/ClickHouse/ClickHouse/issues/42432): - Choose correct aggregation method for LowCardinality with BigInt. [#42342](https://github.com/ClickHouse/ClickHouse/pull/42342) ([Duc Canh Le](https://github.com/canhld94)).
+
+#### Build/Testing/Packaging Improvement
+* Backported in [#42328](https://github.com/ClickHouse/ClickHouse/issues/42328): Update cctz to the latest master, update tzdb to 2020e. [#42273](https://github.com/ClickHouse/ClickHouse/pull/42273) ([Dom Del Nano](https://github.com/ddelnano)).
+* Backported in [#42358](https://github.com/ClickHouse/ClickHouse/issues/42358): Update tzdata to 2022e to support the new timezone changes. Palestine transitions are now Saturdays at 02:00. Simplify three Ukraine zones into one. Jordan and Syria switch from +02/+03 with DST to year-round +03. (https://data.iana.org/time-zones/tzdb/NEWS). This closes [#42252](https://github.com/ClickHouse/ClickHouse/issues/42252). [#42327](https://github.com/ClickHouse/ClickHouse/pull/42327) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+#### Bug Fix (user-visible misbehavior in official stable or prestable release)
+
+* Backported in [#42298](https://github.com/ClickHouse/ClickHouse/issues/42298): Fix a bug with projections and the `aggregate_functions_null_for_empty` setting. This bug is very rare and appears only if you enable the `aggregate_functions_null_for_empty` setting in the server's config. This closes [#41647](https://github.com/ClickHouse/ClickHouse/issues/41647). [#42198](https://github.com/ClickHouse/ClickHouse/pull/42198) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Backported in [#42592](https://github.com/ClickHouse/ClickHouse/issues/42592): This closes [#42453](https://github.com/ClickHouse/ClickHouse/issues/42453). [#42573](https://github.com/ClickHouse/ClickHouse/pull/42573) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+#### NOT FOR CHANGELOG / INSIGNIFICANT
+
+* Add a warning message to release.py script, require release type [#41975](https://github.com/ClickHouse/ClickHouse/pull/41975) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+* Revert [#27787](https://github.com/ClickHouse/ClickHouse/issues/27787) [#42136](https://github.com/ClickHouse/ClickHouse/pull/42136) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
docs/changelogs/v22.3.14.23-lts.md (new file, 29 lines)
@@ -0,0 +1,29 @@
+---
+sidebar_position: 1
+sidebar_label: 2022
+---
+
+# 2022 Changelog
+
+### ClickHouse release v22.3.14.23-lts (74956bfee4d) FIXME as compared to v22.3.13.80-lts (e2708b01fba)
+
+#### Improvement
+* Backported in [#42527](https://github.com/ClickHouse/ClickHouse/issues/42527): Fix issue with passing MySQL timeouts for MySQL database engine and MySQL table function. Closes [#34168](https://github.com/ClickHouse/ClickHouse/issues/34168)?notification_referrer_id=NT_kwDOAzsV57MzMDMxNjAzNTY5OjU0MjAzODc5. [#40751](https://github.com/ClickHouse/ClickHouse/pull/40751) ([Kseniia Sumarokova](https://github.com/kssenii)).
+
+#### Bug Fix
+* Backported in [#42432](https://github.com/ClickHouse/ClickHouse/issues/42432): - Choose correct aggregation method for LowCardinality with BigInt. [#42342](https://github.com/ClickHouse/ClickHouse/pull/42342) ([Duc Canh Le](https://github.com/canhld94)).
+
+#### Build/Testing/Packaging Improvement
+* Backported in [#42328](https://github.com/ClickHouse/ClickHouse/issues/42328): Update cctz to the latest master, update tzdb to 2020e. [#42273](https://github.com/ClickHouse/ClickHouse/pull/42273) ([Dom Del Nano](https://github.com/ddelnano)).
+* Backported in [#42358](https://github.com/ClickHouse/ClickHouse/issues/42358): Update tzdata to 2022e to support the new timezone changes. Palestine transitions are now Saturdays at 02:00. Simplify three Ukraine zones into one. Jordan and Syria switch from +02/+03 with DST to year-round +03. (https://data.iana.org/time-zones/tzdb/NEWS). This closes [#42252](https://github.com/ClickHouse/ClickHouse/issues/42252). [#42327](https://github.com/ClickHouse/ClickHouse/pull/42327) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+#### Bug Fix (user-visible misbehavior in official stable or prestable release)
+
+* Backported in [#42298](https://github.com/ClickHouse/ClickHouse/issues/42298): Fix a bug with projections and the `aggregate_functions_null_for_empty` setting. This bug is very rare and appears only if you enable the `aggregate_functions_null_for_empty` setting in the server's config. This closes [#41647](https://github.com/ClickHouse/ClickHouse/issues/41647). [#42198](https://github.com/ClickHouse/ClickHouse/pull/42198) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Backported in [#42592](https://github.com/ClickHouse/ClickHouse/issues/42592): This closes [#42453](https://github.com/ClickHouse/ClickHouse/issues/42453). [#42573](https://github.com/ClickHouse/ClickHouse/pull/42573) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+
+#### NOT FOR CHANGELOG / INSIGNIFICANT
+
+* Add a warning message to release.py script, require release type [#41975](https://github.com/ClickHouse/ClickHouse/pull/41975) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
+* Revert [#27787](https://github.com/ClickHouse/ClickHouse/issues/27787) [#42136](https://github.com/ClickHouse/ClickHouse/pull/42136) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+
@@ -14,8 +14,10 @@ Example of a polygon dictionary configuration:
<dictionary>
    <structure>
        <key>
-            <name>key</name>
-            <type>Array(Array(Array(Array(Float64))))</type>
+            <attribute>
+                <name>key</name>
+                <type>Array(Array(Array(Array(Float64))))</type>
+            </attribute>
        </key>

        <attribute>
@@ -1068,7 +1068,7 @@ Example:
SELECT timeSlots(toDateTime('2012-01-01 12:20:00'), toUInt32(600));
SELECT timeSlots(toDateTime('1980-12-12 21:01:02', 'UTC'), toUInt32(600), 299);
SELECT timeSlots(toDateTime64('1980-12-12 21:01:02.1234', 4, 'UTC'), toDecimal64(600.1, 1), toDecimal64(299, 0));
```
``` text
┌─timeSlots(toDateTime('2012-01-01 12:20:00'), toUInt32(600))─┐
│ ['2012-01-01 12:00:00','2012-01-01 12:30:00'] │
@@ -1244,7 +1244,7 @@ Result:
└──────────────────────────┘
```

-When there are two arguments: first is an [Integer](../../sql-reference/data-types/int-uint.md) or [DateTime](../../sql-reference/data-types/datetime.md), second is a constant format string — it acts in the same way as [formatDateTime](#formatdatetime) and return [String](../../sql-reference/data-types/string.md#string) type.
+When there are two or three arguments, the first an [Integer](../../sql-reference/data-types/int-uint.md), [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md), the second a constant format string and the third an optional constant time zone string — it acts in the same way as [formatDateTime](#formatdatetime) and return [String](../../sql-reference/data-types/string.md#string) type.

For example:
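The example that follows "For example:" lies outside this hunk. As a minimal sketch of the two-argument form described in the new sentence (the format string and result shown are illustrative assumptions, not taken from this commit):

```sql
-- Illustrative only: format a Unix timestamp with a constant format string.
-- The result depends on the server time zone; in UTC this returns '1983-06-04 02:58:55'.
SELECT FROM_UNIXTIME(423543535, '%Y-%m-%d %R:%S') AS formatted;
```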
@@ -204,8 +204,9 @@ It is used if it is necessary to add or update a column with a complicated expre
Syntax:

```sql
-ALTER TABLE table MATERIALIZE COLUMN col;
+ALTER TABLE [db.]table [ON CLUSTER cluster] MATERIALIZE COLUMN col [IN PARTITION partition | IN PARTITION ID 'partition_id'];
```
+- If you specify a PARTITION, a column will be materialized with only the specified partition.

**Example**
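To make the widened syntax above concrete, a hypothetical invocation restricted to a single partition could look like the sketch below; the database, table, column and partition values are invented for illustration and are not part of this commit.

```sql
-- Assumes a MergeTree table partitioned by toYYYYMM(event_date); all names are hypothetical.
ALTER TABLE db.visits ON CLUSTER main_cluster
    MATERIALIZE COLUMN revenue_per_visit IN PARTITION 202210;
```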
@@ -31,7 +31,7 @@ By default, ClickHouse uses its own [Atomic](../../../engines/database-engines/a

### COMMENT

-You can add a comment to the database when you creating it.
+You can add a comment to the database when you are creating it.

The comment is supported for all database engines.
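As a short hedged illustration of the sentence corrected above (the database name and comment text are made up, not from the diff):

```sql
-- Create a database with a comment, then read the comment back from system.databases.
CREATE DATABASE test_db ENGINE = Atomic COMMENT 'Scratch database for examples';
SELECT name, comment FROM system.databases WHERE name = 'test_db';
```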
@@ -1126,8 +1126,7 @@ SELECT FROM_UNIXTIME(423543535);
└──────────────────────────┘
```

-When there are two arguments: the first of type [Integer](../../sql-reference/data-types/int-uint.md) or [DateTime](../../sql-reference/data-types/datetime.md), and the second a constant format string, the function works the same way as [formatDateTime](#formatdatetime) and returns a value of type [String](../../sql-reference/data-types/string.md#string).
+When there are two or three arguments: the first of type [Integer](../../sql-reference/data-types/int-uint.md), [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md), the second a constant format string, and the third a constant time zone string, the function works the same way as [formatDateTime](#formatdatetime) and returns a value of type [String](../../sql-reference/data-types/string.md#string).

Query:
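The paragraph above (translated from the Russian docs) also covers the new three-argument form. A hedged sketch of that variant, with the time zone value chosen only for illustration:

```sql
-- Illustrative only: the third argument pins the time zone instead of using the server default.
SELECT FROM_UNIXTIME(423543535, '%Y-%m-%d %R:%S', 'UTC') AS formatted;
```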
@@ -10,23 +10,34 @@ namespace DB
namespace
{

-struct QueryTreeNodeHash
+struct QueryTreeNodeWithHash
{
-    size_t operator()(const IQueryTreeNode * node) const
+    explicit QueryTreeNodeWithHash(const IQueryTreeNode * node_)
+        : node(node_)
+        , hash(node->getTreeHash().first)
+    {}
+
+    const IQueryTreeNode * node = nullptr;
+    size_t hash = 0;
+};
+
+struct QueryTreeNodeWithHashHash
+{
+    size_t operator()(const QueryTreeNodeWithHash & node_with_hash) const
    {
-        return node->getTreeHash().first;
+        return node_with_hash.hash;
    }
};

-struct QueryTreeNodeEqualTo
+struct QueryTreeNodeWithHashEqualTo
{
-    size_t operator()(const IQueryTreeNode * lhs_node, const IQueryTreeNode * rhs_node) const
+    bool operator()(const QueryTreeNodeWithHash & lhs_node, const QueryTreeNodeWithHash & rhs_node) const
    {
-        return lhs_node->isEqual(*rhs_node);
+        return lhs_node.hash == rhs_node.hash && lhs_node.node->isEqual(*rhs_node.node);
    }
};

-using QueryTreeNodeSet = std::unordered_set<const IQueryTreeNode *, QueryTreeNodeHash, QueryTreeNodeEqualTo>;
+using QueryTreeNodeWithHashSet = std::unordered_set<QueryTreeNodeWithHash, QueryTreeNodeWithHashHash, QueryTreeNodeWithHashEqualTo>;

class OrderByLimitByDuplicateEliminationVisitor : public InDepthQueryTreeVisitor<OrderByLimitByDuplicateEliminationVisitor>
{
@@ -82,7 +93,7 @@ public:
    }

private:
-    QueryTreeNodeSet unique_expressions_nodes_set;
+    QueryTreeNodeWithHashSet unique_expressions_nodes_set;
};

}
@@ -128,7 +128,7 @@ namespace DB
        int number_of_fds_ready = 0;
        if (!readable_sockets.empty())
        {
-            number_of_fds_ready = poll(readable_sockets.data(), readable_sockets.size(), static_cast<int>(timeout));
+            number_of_fds_ready = poll(readable_sockets.data(), static_cast<nfds_t>(readable_sockets.size()), static_cast<int>(timeout));
        }

        if (number_of_fds_ready > 0)
@@ -55,6 +55,9 @@ bool MetadataStorageFromStaticFilesWebServer::exists(const std::string & path) c
        path,
        [](const auto & file, const std::string & path_) { return file.first < path_; }
    );
+    if (it == object_storage.files.end())
+        return false;
+
    if (startsWith(it->first, path)
        || (it != object_storage.files.begin() && startsWith(std::prev(it)->first, path)))
        return true;
@@ -1,5 +1,6 @@
#include <DataTypes/DataTypeString.h>
#include <DataTypes/DataTypeDate.h>
+#include <DataTypes/DataTypeDate32.h>
#include <DataTypes/DataTypeDateTime.h>
#include <DataTypes/DataTypeDateTime64.h>
#include <Columns/ColumnString.h>
@@ -45,6 +46,7 @@ template <> struct ActionValueTypeMap<DataTypeUInt32> { using ActionValueTyp
template <> struct ActionValueTypeMap<DataTypeInt64> { using ActionValueType = UInt32; };
template <> struct ActionValueTypeMap<DataTypeUInt64> { using ActionValueType = UInt32; };
template <> struct ActionValueTypeMap<DataTypeDate> { using ActionValueType = UInt16; };
+template <> struct ActionValueTypeMap<DataTypeDate32> { using ActionValueType = Int32; };
template <> struct ActionValueTypeMap<DataTypeDateTime> { using ActionValueType = UInt32; };
// TODO(vnemkov): to add sub-second format instruction, make that DateTime64 and do some math in Action<T>.
template <> struct ActionValueTypeMap<DataTypeDateTime64> { using ActionValueType = Int64; };
@@ -315,44 +317,39 @@ public:
        if constexpr (support_integer)
        {
            if (arguments.size() != 1 && arguments.size() != 2 && arguments.size() != 3)
-                throw Exception(
-                    "Number of arguments for function " + getName() + " doesn't match: passed " + toString(arguments.size())
-                        + ", should be 1, 2 or 3",
-                    ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
+                throw Exception(ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH,
+                    "Number of arguments for function {} doesn't match: passed {}, should be 1, 2 or 3",
+                    getName(), arguments.size());
            if (arguments.size() == 1 && !isInteger(arguments[0].type))
-                throw Exception(
-                    "Illegal type " + arguments[0].type->getName() + " of 1 argument of function " + getName()
-                        + " when arguments size is 1. Should be integer",
-                    ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
+                throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT,
+                    "Illegal type {} of first argument of function {} when arguments size is 1. Should be integer",
+                    arguments[0].type->getName(), getName());
-            if (arguments.size() > 1 && !(isInteger(arguments[0].type) || isDate(arguments[0].type) || isDateTime(arguments[0].type) || isDateTime64(arguments[0].type)))
-                throw Exception(
-                    "Illegal type " + arguments[0].type->getName() + " of 1 argument of function " + getName()
-                        + " when arguments size is 2 or 3. Should be a integer or a date with time",
-                    ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
+            if (arguments.size() > 1 && !(isInteger(arguments[0].type) || isDate(arguments[0].type) || isDateTime(arguments[0].type) || isDate32(arguments[0].type) || isDateTime64(arguments[0].type)))
+                throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT,
+                    "Illegal type {} of first argument of function {} when arguments size is 2 or 3. Should be a integer or a date with time",
+                    arguments[0].type->getName(), getName());
        }
        else
        {
            if (arguments.size() != 2 && arguments.size() != 3)
-                throw Exception(
-                    "Number of arguments for function " + getName() + " doesn't match: passed " + toString(arguments.size())
-                        + ", should be 2 or 3",
-                    ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
+                throw Exception(ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH,
+                    "Number of arguments for function {} doesn't match: passed {}, should be 2 or 3",
+                    getName(), arguments.size());
-            if (!isDate(arguments[0].type) && !isDateTime(arguments[0].type) && !isDateTime64(arguments[0].type))
-                throw Exception(
-                    "Illegal type " + arguments[0].type->getName() + " of 1 argument of function " + getName()
-                        + ". Should be a date or a date with time",
-                    ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
+            if (!isDate(arguments[0].type) && !isDateTime(arguments[0].type) && !isDate32(arguments[0].type) && !isDateTime64(arguments[0].type))
+                throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT,
+                    "Illegal type {} of first argument of function {}. Should be a date or a date with time",
+                    arguments[0].type->getName(), getName());
        }

        if (arguments.size() == 2 && !WhichDataType(arguments[1].type).isString())
-            throw Exception(
-                "Illegal type " + arguments[1].type->getName() + " of 2 argument of function " + getName() + ". Must be String.",
-                ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
+            throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT,
+                "Illegal type {} of second argument of function {}. Must be String.",
+                arguments[1].type->getName(), getName());

        if (arguments.size() == 3 && !WhichDataType(arguments[2].type).isString())
-            throw Exception(
-                "Illegal type " + arguments[2].type->getName() + " of 3 argument of function " + getName() + ". Must be String.",
-                ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
+            throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT,
+                "Illegal type {} of third argument of function {}. Must be String.",
+                arguments[2].type->getName(), getName());

        if (arguments.size() == 1)
            return std::make_shared<DataTypeDateTime>();
@@ -373,10 +370,9 @@ public:
                    return true;
                }))
                {
-                    throw Exception(
-                        "Illegal column " + arguments[0].column->getName() + " of function " + getName()
-                            + ", must be Integer or DateTime when arguments size is 1.",
-                        ErrorCodes::ILLEGAL_COLUMN);
+                    throw Exception(ErrorCodes::ILLEGAL_COLUMN,
+                        "Illegal column {} of function {}, must be Integer, Date, Date32, DateTime or DateTime64 when arguments size is 1.",
+                        arguments[0].column->getName(), getName());
                }
            }
            else
@@ -385,32 +381,31 @@ public:
            {
                using FromDataType = std::decay_t<decltype(type)>;
                if (!(res = executeType<FromDataType>(arguments, result_type)))
-                    throw Exception(
-                        "Illegal column " + arguments[0].column->getName() + " of function " + getName()
-                            + ", must be Integer or DateTime.",
-                        ErrorCodes::ILLEGAL_COLUMN);
+                    throw Exception(ErrorCodes::ILLEGAL_COLUMN,
+                        "Illegal column {} of function {}, must be Integer, Date, Date32, DateTime or DateTime64.",
+                        arguments[0].column->getName(), getName());
                return true;
            }))
            {
                if (!((res = executeType<DataTypeDate>(arguments, result_type))
+                    || (res = executeType<DataTypeDate32>(arguments, result_type))
                    || (res = executeType<DataTypeDateTime>(arguments, result_type))
                    || (res = executeType<DataTypeDateTime64>(arguments, result_type))))
-                    throw Exception(
-                        "Illegal column " + arguments[0].column->getName() + " of function " + getName()
-                            + ", must be Integer or DateTime.",
-                        ErrorCodes::ILLEGAL_COLUMN);
+                    throw Exception(ErrorCodes::ILLEGAL_COLUMN,
+                        "Illegal column {} of function {}, must be Integer or DateTime.",
+                        arguments[0].column->getName(), getName());
            }
        }
    }
    else
    {
        if (!((res = executeType<DataTypeDate>(arguments, result_type))
+            || (res = executeType<DataTypeDate32>(arguments, result_type))
            || (res = executeType<DataTypeDateTime>(arguments, result_type))
            || (res = executeType<DataTypeDateTime64>(arguments, result_type))))
-            throw Exception(
-                "Illegal column " + arguments[0].column->getName() + " of function " + getName()
-                    + ", must be Date or DateTime.",
-                ErrorCodes::ILLEGAL_COLUMN);
+            throw Exception(ErrorCodes::ILLEGAL_COLUMN,
+                "Illegal column {} of function {}, must be Date or DateTime.",
+                arguments[0].column->getName(), getName());
    }

    return res;
@@ -425,10 +420,9 @@ public:

        const ColumnConst * pattern_column = checkAndGetColumnConst<ColumnString>(arguments[1].column.get());
        if (!pattern_column)
-            throw Exception("Illegal column " + arguments[1].column->getName()
-                + " of second ('format') argument of function " + getName()
-                + ". Must be constant string.",
-                ErrorCodes::ILLEGAL_COLUMN);
+            throw Exception(ErrorCodes::ILLEGAL_COLUMN,
+                "Illegal column {} of second ('format') argument of function {}. Must be constant string.",
+                arguments[1].column->getName(), getName());

        String pattern = pattern_column->getValue<String>();

@@ -712,12 +706,14 @@ public:
                // Unimplemented
                case 'U': [[fallthrough]];
                case 'W':
-                    throw Exception("Wrong pattern '" + pattern + "', symbol '" + *pos + " is not implemented ' for function " + getName(),
-                        ErrorCodes::NOT_IMPLEMENTED);
+                    throw Exception(ErrorCodes::NOT_IMPLEMENTED,
+                        "Wrong pattern '{}', symbol '{}' is not implemented for function {}",
+                        pattern, *pos, getName());

                default:
-                    throw Exception(
-                        "Wrong pattern '" + pattern + "', unexpected symbol '" + *pos + "' for function " + getName(), ErrorCodes::ILLEGAL_COLUMN);
+                    throw Exception(ErrorCodes::ILLEGAL_COLUMN,
+                        "Wrong pattern '{}', unexpected symbol '{}' for function {}",
+                        pattern, *pos, getName());
            }

            ++pos;
@@ -213,7 +213,7 @@ BlockIO InterpreterDropQuery::executeToTableImpl(ContextPtr context_, ASTDropQue
        {
            /// And for simple MergeTree we can stop merges before acquiring the lock
            auto merges_blocker = table->getActionLock(ActionLocks::PartsMerge);
-            auto table_lock = table->lockExclusively(context_->getCurrentQueryId(), context_->getSettingsRef().lock_acquire_timeout);
+            table_lock = table->lockExclusively(context_->getCurrentQueryId(), context_->getSettingsRef().lock_acquire_timeout);
        }

        auto metadata_snapshot = table->getInMemoryMetadataPtr();
@@ -236,10 +236,11 @@ Field convertFieldToTypeImpl(const Field & src, const IDataType & type, const ID
        }

        if (which_type.isDateTime64()
-            && (which_from_type.isNativeInt() || which_from_type.isNativeUInt() || which_from_type.isDate() || which_from_type.isDate32() || which_from_type.isDateTime() || which_from_type.isDateTime64()))
+            && (src.getType() == Field::Types::UInt64 || src.getType() == Field::Types::Int64 || src.getType() == Field::Types::Decimal64))
        {
            const auto scale = static_cast<const DataTypeDateTime64 &>(type).getScale();
-            const auto decimal_value = DecimalUtils::decimalFromComponents<DateTime64>(applyVisitor(FieldVisitorConvertToNumber<Int64>(), src), 0, scale);
+            const auto decimal_value
+                = DecimalUtils::decimalFromComponents<DateTime64>(applyVisitor(FieldVisitorConvertToNumber<Int64>(), src), 0, scale);
            return Field(DecimalField<DateTime64>(decimal_value, scale));
        }
    }
@@ -561,13 +561,10 @@ public:

    virtual bool getResult(ASTPtr & node)
    {
-        if (elements.size() == 1)
-        {
-            node = std::move(elements[0]);
-            return true;
-        }
+        if (!finished)
+            return false;

-        return false;
+        return getResultImpl(node);
    }

    virtual bool parse(IParser::Pos & /*pos*/, Expected & /*expected*/, Action & /*action*/) = 0;
@@ -746,6 +743,17 @@ public:
    Checkpoint current_checkpoint = Checkpoint::None;

protected:
+    virtual bool getResultImpl(ASTPtr & node)
+    {
+        if (elements.size() == 1)
+        {
+            node = std::move(elements[0]);
+            return true;
+        }
+
+        return false;
+    }
+
    std::vector<Operator> operators;
    ASTs operands;
    ASTs elements;
@@ -766,17 +774,12 @@ public:
    bool getResult(ASTPtr & node) override
    {
        /// We can exit the main cycle outside the parse() function,
-        /// so we need to merge the element here
+        /// so we need to merge the element here.
+        /// Because of this 'finished' flag can also not be set.
        if (!mergeElement())
            return false;

-        if (elements.size() == 1)
-        {
-            node = std::move(elements[0]);
-            return true;
-        }
-
-        return false;
+        return Layer::getResultImpl(node);
    }

    bool parse(IParser::Pos & pos, Expected & /*expected*/, Action & /*action*/) override
@@ -1029,17 +1032,6 @@ private:
class RoundBracketsLayer : public Layer
{
public:
-    bool getResult(ASTPtr & node) override
-    {
-        // Round brackets can mean priority operator as well as function tuple()
-        if (!is_tuple && elements.size() == 1)
-            node = std::move(elements[0]);
-        else
-            node = makeASTFunction("tuple", std::move(elements));
-
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        if (ParserToken(TokenType::Comma).ignore(pos, expected))
@@ -1069,6 +1061,19 @@ public:

        return true;
    }
+
+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        // Round brackets can mean priority operator as well as function tuple()
+        if (!is_tuple && elements.size() == 1)
+            node = std::move(elements[0]);
+        else
+            node = makeASTFunction("tuple", std::move(elements));
+
+        return true;
+    }
+
private:
    bool is_tuple = false;
};
@@ -1077,16 +1082,17 @@ private:
class ArrayLayer : public LayerWithSeparator<TokenType::Comma, TokenType::ClosingSquareBracket>
{
public:
-    bool getResult(ASTPtr & node) override
-    {
-        node = makeASTFunction("array", std::move(elements));
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        return LayerWithSeparator::parse(pos, expected, action);
    }
+
+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        node = makeASTFunction("array", std::move(elements));
+        return true;
+    }
};

/// Layer for arrayElement square brackets operator
@@ -1206,23 +1212,6 @@ class ExtractLayer : public LayerWithSeparator<TokenType::Comma, TokenType::Clos
public:
    ExtractLayer() : LayerWithSeparator(/*allow_alias*/ true, /*allow_alias_without_as_keyword*/ true) {}

-    bool getResult(ASTPtr & node) override
-    {
-        if (state == 2)
-        {
-            if (elements.empty())
-                return false;
-
-            node = makeASTFunction(interval_kind.toNameOfFunctionExtractTimePart(), elements[0]);
-        }
-        else
-        {
-            node = makeASTFunction("extract", std::move(elements));
-        }
-
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        /// extract(haystack, pattern) or EXTRACT(DAY FROM Date)
@@ -1268,6 +1257,25 @@ public:
        return true;
    }

+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        if (state == 2)
+        {
+            if (elements.empty())
+                return false;
+
+            node = makeASTFunction(interval_kind.toNameOfFunctionExtractTimePart(), elements[0]);
+        }
+        else
+        {
+            node = makeASTFunction("extract", std::move(elements));
+        }
+
+        return true;
+    }
+
private:
    IntervalKind interval_kind;
};
@@ -1277,12 +1285,6 @@ class SubstringLayer : public Layer
public:
    SubstringLayer() : Layer(/*allow_alias*/ true, /*allow_alias_without_as_keyword*/ true) {}

-    bool getResult(ASTPtr & node) override
-    {
-        node = makeASTFunction("substring", std::move(elements));
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        /// Either SUBSTRING(expr FROM start [FOR length]) or SUBSTRING(expr, start, length)
@@ -1332,6 +1334,13 @@ public:

        return true;
    }
+
+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        node = makeASTFunction("substring", std::move(elements));
+        return true;
+    }
};

class PositionLayer : public Layer
@@ -1339,15 +1348,6 @@ class PositionLayer : public Layer
public:
    PositionLayer() : Layer(/*allow_alias*/ true, /*allow_alias_without_as_keyword*/ true) {}

-    bool getResult(ASTPtr & node) override
-    {
-        if (state == 2)
-            std::swap(elements[1], elements[0]);
-
-        node = makeASTFunction("position", std::move(elements));
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        /// position(haystack, needle[, start_pos]) or position(needle IN haystack)
@@ -1402,6 +1402,16 @@ public:

        return true;
    }
+
+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        if (state == 2)
+            std::swap(elements[1], elements[0]);
+
+        node = makeASTFunction("position", std::move(elements));
+        return true;
+    }
};

class ExistsLayer : public Layer
@@ -1436,12 +1446,6 @@ public:
    TrimLayer(bool trim_left_, bool trim_right_)
        : Layer(/*allow_alias*/ true, /*allow_alias_without_as_keyword*/ true), trim_left(trim_left_), trim_right(trim_right_) {}

-    bool getResult(ASTPtr & node) override
-    {
-        node = makeASTFunction(function_name, std::move(elements));
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        /// Handles all possible TRIM/LTRIM/RTRIM call variants
@@ -1583,6 +1587,14 @@ public:

        return true;
    }
+
+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        node = makeASTFunction(function_name, std::move(elements));
+        return true;
+    }

private:
    bool trim_left;
    bool trim_right;
@@ -1598,23 +1610,6 @@ public:
    explicit DateAddLayer(const char * function_name_)
        : LayerWithSeparator(/*allow_alias*/ true, /*allow_alias_without_as_keyword*/ true), function_name(function_name_) {}

-    bool getResult(ASTPtr & node) override
-    {
-        if (parsed_interval_kind)
-        {
-            if (elements.size() < 2)
-                return false;
-
-            elements[0] = makeASTFunction(interval_kind.toNameOfFunctionToIntervalDataType(), elements[0]);
-            node = makeASTFunction(function_name, elements[1], elements[0]);
-        }
-        else
-            node = makeASTFunction(function_name, std::move(elements));
-
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        /// DATEADD(YEAR, 1, date) or DATEADD(INTERVAL 1 YEAR, date);
@@ -1644,6 +1639,23 @@ public:
        return true;
    }

+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        if (parsed_interval_kind)
+        {
+            if (elements.size() < 2)
+                return false;
+
+            elements[0] = makeASTFunction(interval_kind.toNameOfFunctionToIntervalDataType(), elements[0]);
+            node = makeASTFunction(function_name, elements[1], elements[0]);
+        }
+        else
+            node = makeASTFunction(function_name, std::move(elements));
+
+        return true;
+    }
+
private:
    IntervalKind interval_kind;
    const char * function_name;
@@ -1655,24 +1667,6 @@ class DateDiffLayer : public LayerWithSeparator<TokenType::Comma, TokenType::Clo
public:
    DateDiffLayer() : LayerWithSeparator(/*allow_alias*/ true, /*allow_alias_without_as_keyword*/ true) {}

-    bool getResult(ASTPtr & node) override
-    {
-        if (parsed_interval_kind)
-        {
-            if (elements.size() == 2)
-                node = makeASTFunction("dateDiff", std::make_shared<ASTLiteral>(interval_kind.toDateDiffUnit()), elements[0], elements[1]);
-            else if (elements.size() == 3)
-                node = makeASTFunction("dateDiff", std::make_shared<ASTLiteral>(interval_kind.toDateDiffUnit()), elements[0], elements[1], elements[2]);
-            else
-                return false;
-        }
-        else
-        {
-            node = makeASTFunction("dateDiff", std::move(elements));
-        }
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & action) override
    {
        /// 0. Try to parse interval_kind (-> 1)
@@ -1699,6 +1693,25 @@ public:
        return true;
    }

+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        if (parsed_interval_kind)
+        {
+            if (elements.size() == 2)
+                node = makeASTFunction("dateDiff", std::make_shared<ASTLiteral>(interval_kind.toDateDiffUnit()), elements[0], elements[1]);
+            else if (elements.size() == 3)
+                node = makeASTFunction("dateDiff", std::make_shared<ASTLiteral>(interval_kind.toDateDiffUnit()), elements[0], elements[1], elements[2]);
+            else
+                return false;
+        }
+        else
+        {
+            node = makeASTFunction("dateDiff", std::move(elements));
+        }
+        return true;
+    }
+
private:
    IntervalKind interval_kind;
    bool parsed_interval_kind = false;
@@ -1882,16 +1895,6 @@ class ViewLayer : public Layer
public:
    explicit ViewLayer(bool if_permitted_) : if_permitted(if_permitted_) {}

-    bool getResult(ASTPtr & node) override
-    {
-        if (if_permitted)
-            node = makeASTFunction("viewIfPermitted", std::move(elements));
-        else
-            node = makeASTFunction("view", std::move(elements));
-
-        return true;
-    }
-
    bool parse(IParser::Pos & pos, Expected & expected, Action & /*action*/) override
    {
        /// view(SELECT ...)
@@ -1948,6 +1951,17 @@ public:
        return true;
    }

+protected:
+    bool getResultImpl(ASTPtr & node) override
+    {
+        if (if_permitted)
+            node = makeASTFunction("viewIfPermitted", std::move(elements));
+        else
+            node = makeASTFunction("view", std::move(elements));
+
+        return true;
+    }
+
private:
    bool if_permitted;
};
@@ -1220,7 +1220,8 @@ bool KeyCondition::transformConstantWithValidFunctions(

    if (is_valid_chain)
    {
-        auto const_type = cur_node->result_type;
+        out_type = removeLowCardinality(out_type);
+        auto const_type = removeLowCardinality(cur_node->result_type);
        auto const_column = out_type->createColumnConst(1, out_value);
        auto const_value = (*castColumnAccurateOrNull({const_column, out_type, ""}, const_type))[0];

@@ -37,12 +37,8 @@ class ClickHouseHelper:
                    url, params=params, data=json_str, headers=auth
                )
            except Exception as e:
-                logging.warning(
-                    "Received exception while sending data to %s on %s attempt: %s",
-                    url,
-                    i,
-                    e,
-                )
+                error = f"Received exception while sending data to {url} on {i} attempt: {e}"
+                logging.warning(error)
                continue

        logging.info("Response content '%s'", response.content)
@@ -129,6 +129,9 @@ def test_incorrect_usage(cluster):
    result = node2.query_and_get_error("TRUNCATE TABLE test0")
    assert "Table is read-only" in result

+    result = node2.query_and_get_error("OPTIMIZE TABLE test0 FINAL")
+    assert "Only read-only operations are supported" in result
+
    node2.query("DROP TABLE test0 SYNC")

@@ -1,33 +1,34 @@
-20
+20 20
+02 02
+01/02/18 01/02/18
+2 2
+2018-01-02 2018-01-02
+22 00
 02
-01/02/18
+10 12
-2
-2018-01-02
-22
-02
-10
 11
 12
-001
+001 001
-366
+366 366
-01
+01 01
-33
+33 00
-\n
+\n \n
-AM
+AM AM
 AM
 PM
-22:33
+22:33 00:00
-44
+44 00
-\t
+\t \t
-22:33:44
+22:33:44 00:00:00
-1 7
+1 7 1 7
-01 01 53 52
+01 01 53 52 01 01 53 52
-1 0
+1 0 1 0
-18
+18 18
-2018
+2018 2018
-%
+% %
-no formatting pattern
+no formatting pattern no formatting pattern
 2018-01-01 00:00:00
+1927-01-01 00:00:00
 2018-01-01 01:00:00 2018-01-01 04:00:00
 +0000
 -1100
@@ -8,38 +8,44 @@ SELECT formatDateTime(now(), 'unescaped %'); -- { serverError 36 }
 SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%U'); -- { serverError 48 }
 SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%W'); -- { serverError 48 }

-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%C');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%C'), formatDateTime(toDate32('2018-01-02'), '%C');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%d');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%d'), formatDateTime(toDate32('2018-01-02'), '%d');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%D');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%D'), formatDateTime(toDate32('2018-01-02'), '%D');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%e');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%e'), formatDateTime(toDate32('2018-01-02'), '%e');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%F');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%F'), formatDateTime(toDate32('2018-01-02'), '%F');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%H');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%H'), formatDateTime(toDate32('2018-01-02'), '%H');
 SELECT formatDateTime(toDateTime('2018-01-02 02:33:44'), '%H');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%I');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%I'), formatDateTime(toDate32('2018-01-02'), '%I');
 SELECT formatDateTime(toDateTime('2018-01-02 11:33:44'), '%I');
 SELECT formatDateTime(toDateTime('2018-01-02 00:33:44'), '%I');
-SELECT formatDateTime(toDateTime('2018-01-01 00:33:44'), '%j');
+SELECT formatDateTime(toDateTime('2018-01-01 00:33:44'), '%j'), formatDateTime(toDate32('2018-01-01'), '%j');
-SELECT formatDateTime(toDateTime('2000-12-31 00:33:44'), '%j');
+SELECT formatDateTime(toDateTime('2000-12-31 00:33:44'), '%j'), formatDateTime(toDate32('2000-12-31'), '%j');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%m');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%m'), formatDateTime(toDate32('2018-01-02'), '%m');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%M');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%M'), formatDateTime(toDate32('2018-01-02'), '%M');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%n');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%n'), formatDateTime(toDate32('2018-01-02'), '%n');
-SELECT formatDateTime(toDateTime('2018-01-02 00:33:44'), '%p');
+SELECT formatDateTime(toDateTime('2018-01-02 00:33:44'), '%p'), formatDateTime(toDateTime('2018-01-02'), '%p');
 SELECT formatDateTime(toDateTime('2018-01-02 11:33:44'), '%p');
 SELECT formatDateTime(toDateTime('2018-01-02 12:33:44'), '%p');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%R');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%R'), formatDateTime(toDate32('2018-01-02'), '%R');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%S');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%S'), formatDateTime(toDate32('2018-01-02'), '%S');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%t');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%t'), formatDateTime(toDate32('2018-01-02'), '%t');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%T');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%T'), formatDateTime(toDate32('2018-01-02'), '%T');
-SELECT formatDateTime(toDateTime('2018-01-01 22:33:44'), '%u'), formatDateTime(toDateTime('2018-01-07 22:33:44'), '%u');
+SELECT formatDateTime(toDateTime('2018-01-01 22:33:44'), '%u'), formatDateTime(toDateTime('2018-01-07 22:33:44'), '%u'),
+    formatDateTime(toDate32('2018-01-01'), '%u'), formatDateTime(toDate32('2018-01-07'), '%u');
 SELECT formatDateTime(toDateTime('1996-01-01 22:33:44'), '%V'), formatDateTime(toDateTime('1996-12-31 22:33:44'), '%V'),
-    formatDateTime(toDateTime('1999-01-01 22:33:44'), '%V'), formatDateTime(toDateTime('1999-12-31 22:33:44'), '%V');
-SELECT formatDateTime(toDateTime('2018-01-01 22:33:44'), '%w'), formatDateTime(toDateTime('2018-01-07 22:33:44'), '%w');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%y');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%Y');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%%');
-SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), 'no formatting pattern');
+    formatDateTime(toDateTime('1999-01-01 22:33:44'), '%V'), formatDateTime(toDateTime('1999-12-31 22:33:44'), '%V'),
+    formatDateTime(toDate32('1996-01-01'), '%V'), formatDateTime(toDate32('1996-12-31'), '%V'),
+    formatDateTime(toDate32('1999-01-01'), '%V'), formatDateTime(toDate32('1999-12-31'), '%V');
+SELECT formatDateTime(toDateTime('2018-01-01 22:33:44'), '%w'), formatDateTime(toDateTime('2018-01-07 22:33:44'), '%w'),
+    formatDateTime(toDate32('2018-01-01'), '%w'), formatDateTime(toDate32('2018-01-07'), '%w');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%y'), formatDateTime(toDate32('2018-01-02'), '%y');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%Y'), formatDateTime(toDate32('2018-01-02'), '%Y');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), '%%'), formatDateTime(toDate32('2018-01-02'), '%%');
+SELECT formatDateTime(toDateTime('2018-01-02 22:33:44'), 'no formatting pattern'), formatDateTime(toDate32('2018-01-02'), 'no formatting pattern');

 SELECT formatDateTime(toDate('2018-01-01'), '%F %T');
+SELECT formatDateTime(toDate32('1927-01-01'), '%F %T');

 SELECT
     formatDateTime(toDateTime('2018-01-01 01:00:00', 'UTC'), '%F %T', 'UTC'),
     formatDateTime(toDateTime('2018-01-01 01:00:00', 'UTC'), '%F %T', 'Asia/Istanbul');
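The hunk above extends each existing formatDateTime test so the same format specifier is also exercised with a Date32 argument. A minimal sketch of the new behaviour, not part of the diff itself (the column aliases are illustrative; the expected values follow from the reference-file changes earlier in this commit):

    SELECT
        formatDateTime(toDate32('2018-01-02'), '%F') AS d32_iso,      -- '2018-01-02'
        formatDateTime(toDate32('1927-01-01'), '%F %T') AS pre_epoch; -- '1927-01-01 00:00:00'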
@@ -5,25 +5,25 @@
 11
 1970-01-15
 1970-01-15 06:52:36
-20
-02
-01/02/18
-2
-2018-01-02
-22
+20 20
+02 02
+01/02/18 01/02/18
+2 2
+2018-01-02 2018-01-02
+22 00
 02
-10
+10 12
 11
 12
-001
+001 001
-366
+366 366
-01
+01 01
-33
+33 00
-\n
+\n \n
-AM
+AM AM
 AM
 PM
-22:33
+22:33 00:00
-44
+44 00
-\t
+\t \t
-22:33:44
+22:33:44 00:00:00
@@ -5,25 +5,25 @@ SELECT FROM_UNIXTIME(5345345, '%C', 'UTC');
 SELECT FROM_UNIXTIME(645123, '%H', 'UTC');
 SELECT FROM_UNIXTIME(1232456, '%Y-%m-%d', 'UTC');
 SELECT FROM_UNIXTIME(1234356, '%Y-%m-%d %R:%S', 'UTC');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%C');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%C'), FROM_UNIXTIME(toDate32('2018-01-02'), '%C');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%d');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%d'), FROM_UNIXTIME(toDate32('2018-01-02'), '%d');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%D');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%D'), FROM_UNIXTIME(toDate32('2018-01-02'), '%D');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%e');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%e'), FROM_UNIXTIME(toDate32('2018-01-02'), '%e');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%F');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%F'), FROM_UNIXTIME(toDate32('2018-01-02'), '%F');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%H');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%H'), FROM_UNIXTIME(toDate32('2018-01-02'), '%H');
 SELECT FROM_UNIXTIME(toDateTime('2018-01-02 02:33:44'), '%H');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%I');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%I'), FROM_UNIXTIME(toDate32('2018-01-02'), '%I');
 SELECT FROM_UNIXTIME(toDateTime('2018-01-02 11:33:44'), '%I');
 SELECT FROM_UNIXTIME(toDateTime('2018-01-02 00:33:44'), '%I');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-01 00:33:44'), '%j');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-01 00:33:44'), '%j'), FROM_UNIXTIME(toDate32('2018-01-01'), '%j');
-SELECT FROM_UNIXTIME(toDateTime('2000-12-31 00:33:44'), '%j');
+SELECT FROM_UNIXTIME(toDateTime('2000-12-31 00:33:44'), '%j'), FROM_UNIXTIME(toDate32('2000-12-31'), '%j');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%m');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%m'), FROM_UNIXTIME(toDate32('2018-01-02'), '%m');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%M');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%M'), FROM_UNIXTIME(toDate32('2018-01-02'), '%M');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%n');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%n'), FROM_UNIXTIME(toDate32('2018-01-02'), '%n');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 00:33:44'), '%p');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 00:33:44'), '%p'), FROM_UNIXTIME(toDate32('2018-01-02'), '%p');
 SELECT FROM_UNIXTIME(toDateTime('2018-01-02 11:33:44'), '%p');
 SELECT FROM_UNIXTIME(toDateTime('2018-01-02 12:33:44'), '%p');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%R');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%R'), FROM_UNIXTIME(toDate32('2018-01-02'), '%R');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%S');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%S'), FROM_UNIXTIME(toDate32('2018-01-02'), '%S');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%t');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%t'), FROM_UNIXTIME(toDate32('2018-01-02'), '%t');
-SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%T');
+SELECT FROM_UNIXTIME(toDateTime('2018-01-02 22:33:44'), '%T'), FROM_UNIXTIME(toDate32('2018-01-02'), '%T');
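The same Date32 coverage is added for FROM_UNIXTIME. A small usage sketch, again not part of the diff, with expected output taken from the reference hunk above:

    SELECT
        FROM_UNIXTIME(toDate32('2018-01-02'), '%C') AS century,  -- '20'
        FROM_UNIXTIME(toDate32('2018-01-02'), '%D') AS us_date;  -- '01/02/18'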
@@ -0,0 +1 @@
+printer1
@@ -0,0 +1,9 @@
+drop table if exists test;
+
+create table test (Printer LowCardinality(String), IntervalStart DateTime) engine MergeTree partition by (hiveHash(Printer), toYear(IntervalStart)) order by (Printer, IntervalStart);
+
+insert into test values ('printer1', '2006-02-07 06:28:15');
+
+select Printer from test where Printer='printer1';
+
+drop table test;
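The new test above covers partitioning a MergeTree table by a hash of a LowCardinality(String) column. Not part of the commit, but one way to inspect the resulting partitions, assuming the table was created in the current database:

    SELECT partition, name
    FROM system.parts
    WHERE table = 'test' AND active;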
@@ -0,0 +1 @@
+CREATE DATABASE conv_mian ENGINE QALL(COLUMNS('|T.D'),¸mp} -- { clientError SYNTAX_ERROR }
@@ -101,7 +101,7 @@ int mainImpl(int argc, char ** argv)
     size_t ops = 0;
     while (ops < count)
     {
-        if (poll(polls.data(), static_cast<unsigned int>(descriptors), -1) <= 0)
+        if (poll(polls.data(), static_cast<nfds_t>(descriptors), -1) <= 0)
             throwFromErrno("poll failed", ErrorCodes::SYSTEM_ERROR);
         for (size_t i = 0; i < descriptors; ++i)
         {
@@ -36,6 +36,7 @@ v22.4.5.9-stable 2022-05-06
 v22.4.4.7-stable 2022-04-29
 v22.4.3.3-stable 2022-04-26
 v22.4.2.1-stable 2022-04-22
+v22.3.14.23-lts 2022-10-28
 v22.3.13.80-lts 2022-09-30
 v22.3.12.19-lts 2022-08-29
 v22.3.11.12-lts 2022-08-10