time_virtual_col_tests: tests reintroduced
commit 4baabb16f9 (parent e4a0f42ab8)
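The reintroduced tests cover the _time virtual column exposed by the object-storage table functions (azureBlobStorage, hdfs, s3) and by file(). Because a file's modification time depends on when the test runs, every check compares _time to now() with dateDiff instead of asserting a fixed value. Below is a minimal sketch of the query pattern being exercised; it is not part of the commit, and the path 'data.csv' and the inferred column name c1 are illustrative assumptions only:

-- Illustrative only: read ordinary data alongside the _path, _file,
-- _size and _time virtual columns, and sanity-check _time against now()
-- rather than against a hard-coded timestamp.
SELECT
    c1,
    _path,
    _file,
    _size,
    dateDiff('minute', _time, now()) < 59 AS modified_recently
FROM file('data.csv');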
@@ -790,6 +790,27 @@ def test_read_subcolumns(cluster):
    assert res == "42\tcont/test_subcolumns.jsonl\t(42,42)\ttest_subcolumns.jsonl\t42\n"


def test_read_subcolumn_time(cluster):
    node = cluster.instances["node"]
    storage_account_url = cluster.env_variables["AZURITE_STORAGE_ACCOUNT_URL"]
    azure_query(
        node,
        f"INSERT INTO TABLE FUNCTION azureBlobStorage('{storage_account_url}', 'cont', 'test_subcolumn_time.tsv', "
        f"'devstoreaccount1', 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==', 'auto', 'auto',"
        f" 'a Tuple(b Tuple(c UInt32, d UInt32), e UInt32)') select ((1, 2), 3)",
    )

    res = node.query(
        f"select a.b.d, _path, a.b, _file, dateDiff('minute', _time, now()) < 59, a.e from azureBlobStorage('{storage_account_url}', 'cont', 'test_subcolumn_time.tsv',"
        f" 'devstoreaccount1', 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==', 'auto', 'auto',"
        f" 'a Tuple(b Tuple(c UInt32, d UInt32), e UInt32)')"
    )

    assert (
        res == "2\tcont/test_subcolumn_time.tsv\t(1,2)\ttest_subcolumn_time.tsv\t1\t3\n"
    )


def test_read_from_not_existing_container(cluster):
    node = cluster.instances["node"]
    query = (
@@ -999,6 +999,20 @@ def test_read_subcolumns(started_cluster):
    assert res == "42\ttest_subcolumns.jsonl\t(42,42)\ttest_subcolumns.jsonl\t42\n"


def test_read_subcolumn_time(started_cluster):
    node = started_cluster.instances["node1"]

    node.query(
        f"insert into function hdfs('hdfs://hdfs1:9000/test_subcolumn_time.tsv', auto, 'a Tuple(b Tuple(c UInt32, d UInt32), e UInt32)') select ((1, 2), 3)"
    )

    res = node.query(
        f"select a.b.d, _path, a.b, _file, dateDiff('minute', _time, now()) < 59, a.e from hdfs('hdfs://hdfs1:9000/test_subcolumn_time.tsv', auto, 'a Tuple(b Tuple(c UInt32, d UInt32), e UInt32)')"
    )

    assert res == "2\ttest_subcolumn_time.tsv\t(1,2)\ttest_subcolumn_time.tsv\t1\t3\n"


def test_union_schema_inference_mode(started_cluster):
    node = started_cluster.instances["node1"]
@@ -2149,6 +2149,23 @@ def test_read_subcolumns(started_cluster):
    )


def test_read_subcolumn_time(started_cluster):
    bucket = started_cluster.minio_bucket
    instance = started_cluster.instances["dummy"]

    instance.query(
        f"insert into function s3('http://{started_cluster.minio_host}:{started_cluster.minio_port}/{bucket}/test_subcolumn_time.tsv', auto, 'a Tuple(b Tuple(c UInt32, d UInt32), e UInt32)') select ((1, 2), 3)"
    )

    res = instance.query(
        f"select a.b.d, _path, a.b, _file, dateDiff('minute', _time, now()) < 59, a.e from s3('http://{started_cluster.minio_host}:{started_cluster.minio_port}/{bucket}/test_subcolumn_time.tsv', auto, 'a Tuple(b Tuple(c UInt32, d UInt32), e UInt32)')"
    )

    assert (
        res == "2\troot/test_subcolumn_time.tsv\t(1,2)\ttest_subcolumn_time.tsv\t1\t3\n"
    )


def test_filtering_by_file_or_path(started_cluster):
    bucket = started_cluster.minio_bucket
    instance = started_cluster.instances["dummy"]
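All three integration tests above follow the same shape: insert a nested tuple through the table function, then read its subcolumns together with _path, _file and _time, asserting only that _time falls within the last hour, since the exact timestamp depends on when the test is executed.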
@ -0,0 +1 @@
|
||||
4 1
|
tests/queries/0_stateless/03169_time_virtual_column.sh (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/usr/bin/env bash

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh

echo "1,2" > $CLICKHOUSE_TEST_UNIQUE_NAME.csv
sleep 1
$CLICKHOUSE_LOCAL -nm -q "
select _size, (dateDiff('millisecond', _time, now()) < 600000 AND dateDiff('millisecond', _time, now()) > 0) from file('$CLICKHOUSE_TEST_UNIQUE_NAME.csv');
"
rm $CLICKHOUSE_TEST_UNIQUE_NAME.csv
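The one-line "4 1" hunk earlier appears to be the expected output for this script: echo "1,2" writes four bytes (including the trailing newline), so _size is 4, and the loose _time check against now() evaluates to 1.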