Merge pull request #26011 from vzakaznikov/fix_and_enable_testflows_tests

Enabling all TestFlows modules and fixing some tests.
Authored by alexey-milovidov on 2021-07-07 22:46:25 +03:00; committed by GitHub.
commit dfd9e1d737
12 changed files with 228 additions and 186 deletions

View File

@@ -73,4 +73,4 @@ RUN set -x \
VOLUME /var/lib/docker
EXPOSE 2375
ENTRYPOINT ["dockerd-entrypoint.sh"]
CMD ["sh", "-c", "python3 regression.py --no-color -o new-fails --local --clickhouse-binary-path ${CLICKHOUSE_TESTS_SERVER_BIN_PATH} --log test.log ${TESTFLOWS_OPTS}; cat test.log | tfs report results --format json > results.json; /usr/local/bin/process_testflows_result.py || echo -e 'failure\tCannot parse results' > check_status.tsv; find * -type f | grep _instances | grep clickhouse-server | xargs -n1 tar -rvf clickhouse_logs.tar; gzip -9 clickhouse_logs.tar"]
CMD ["sh", "-c", "python3 regression.py --no-color -o new-fails --local --clickhouse-binary-path ${CLICKHOUSE_TESTS_SERVER_BIN_PATH} --log test.log ${TESTFLOWS_OPTS}; cat test.log | tfs report results --format json > results.json; /usr/local/bin/process_testflows_result.py || echo -e 'failure\tCannot parse results' > check_status.tsv; find * -type f | grep _instances | grep clickhouse-server | xargs -n0 tar -rvf clickhouse_logs.tar; gzip -9 clickhouse_logs.tar"]

View File

@@ -1035,10 +1035,12 @@ a
mapAdd_with_Int128_on_a_table = r"""
a
([1,2],[2,4])
"""
mapSubtract_with_Int128_on_a_table = r"""
a
([1,2],[0,0])
"""
mapPopulateSeries_with_Int128_on_a_table = r"""
@@ -1563,10 +1565,12 @@ a
mapAdd_with_Int256_on_a_table = r"""
a
([1,2],[2,4])
"""
mapSubtract_with_Int256_on_a_table = r"""
a
([1,2],[0,0])
"""
mapPopulateSeries_with_Int256_on_a_table = r"""
@@ -2091,10 +2095,12 @@ a
mapAdd_with_UInt128_on_a_table = r"""
a
([1,2],[2,4])
"""
mapSubtract_with_UInt128_on_a_table = r"""
a
([1,2],[0,0])
"""
mapPopulateSeries_with_UInt128_on_a_table = r"""
@@ -2619,10 +2625,12 @@ a
mapAdd_with_UInt256_on_a_table = r"""
a
([1,2],[2,4])
"""
mapSubtract_with_UInt256_on_a_table = r"""
a
([1,2],[0,0])
"""
mapPopulateSeries_with_UInt256_on_a_table = r"""
@@ -6280,3 +6288,43 @@ a
\N
"""
mapAdd_with_Int128 = r"""
mapAdd(tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))), tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))))
([1,2],[2,4])
"""
mapSubtract_with_Int128 = r"""
mapSubtract(tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))), tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))))
([1,2],[0,0])
"""
mapAdd_with_Int256 = r"""
mapAdd(tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))), tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))))
([1,2],[2,4])
"""
mapSubtract_with_Int256 = r"""
mapSubtract(tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))), tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))))
([1,2],[0,0])
"""
mapAdd_with_UInt128 = r"""
mapAdd(tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))), tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))))
([1,2],[2,4])
"""
mapSubtract_with_UInt128 = r"""
mapSubtract(tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))), tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))))
([1,2],[0,0])
"""
mapAdd_with_UInt256 = r"""
mapAdd(tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))), tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))))
([1,2],[2,4])
"""
mapSubtract_with_UInt256 = r"""
mapSubtract(tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))), tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))))
([1,2],[0,0])
"""

View File

@@ -50,18 +50,16 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = f'Array({data_type})')
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in ['arraySplit((x, y) -> x=y, [0, 0, 0],']:
with Scenario(f"Inline - {data_type} - {func})"):
execute_query(f"""
SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))
""")
execute_query(f"SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)},"
f"{to_data_type(data_type,1)}))")
with Scenario(f"Table - {data_type} - {func})"):
table_name = get_table_name()
@@ -69,18 +67,15 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = f'Array(Array({data_type}))')
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in [f'arrayZip([{to_data_type(data_type,1)}],']:
with Scenario(f"Inline - {data_type} - {func})"):
execute_query(f"""
SELECT {func}array({to_data_type(data_type,3)}))
""")
execute_query(f"SELECT {func}array({to_data_type(data_type,3)}))")
with Scenario(f"Table - {data_type} - {func})"):
table_name = get_table_name()
@@ -90,9 +85,7 @@ def array_func(self, data_type, node=None):
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,1)}))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in ['empty(',
'notEmpty(',
@@ -125,20 +118,17 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = data_type)
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
exitcode = 44, message = 'Exception:')
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
else:
with Scenario(f"Inline - {data_type} - {func})"):
execute_query(f"""
SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))
""")
execute_query(f"SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
with Scenario(f"Table - {data_type} - {func})"):
table_name = get_table_name()
@@ -146,11 +136,10 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = data_type)
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in ['arrayDifference(',
'arrayCumSum(',
@@ -171,12 +160,11 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = data_type)
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
exitcode = exitcode, message = 'Exception:')
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in ['arrayElement']:
@@ -192,20 +180,18 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = data_type)
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in ['arrayPushBack',
'arrayPushFront']:
with Scenario(f"Inline - {data_type} - {func}"):
execute_query(f"""
SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), {to_data_type(data_type,1)})
""")
execute_query(f"SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)},"
f"{to_data_type(data_type,1)}), {to_data_type(data_type,1)})")
with Scenario(f"Table - {data_type} - {func}"):
table_name = get_table_name()
@@ -213,20 +199,18 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = f'Array({data_type})')
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), {to_data_type(data_type,1)})")
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), {to_data_type(data_type,1)})")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in ['arrayResize',
'arraySlice']:
with Scenario(f"Inline - {data_type} - {func}"):
execute_query(f"""
SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)
""")
execute_query(f"SELECT {func}(array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
with Scenario(f"Table - {data_type} - {func}"):
table_name = get_table_name()
@@ -234,20 +218,18 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = f'Array({data_type})')
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
for func in ['has',
'indexOf',
'countEqual']:
with Scenario(f"Inline - {data_type} - {func}"):
execute_query(f"""
SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)
""")
execute_query(f"SELECT {func}(array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)")
with Scenario(f"Table - {data_type} - {func}"):
table_name = get_table_name()
@@ -255,11 +237,10 @@ def array_func(self, data_type, node=None):
table(name = table_name, data_type = data_type)
with When("I insert the output into the table"):
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)")
node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
@TestOutline(Suite)
@Requirements(
@@ -281,11 +262,10 @@ def tuple_func(self, data_type, node=None):
table(name = table_name, data_type = f'Tuple({data_type}, {data_type}, {data_type})')
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT tuple({to_data_type(data_type,1)}, {to_data_type(data_type,1)}, {to_data_type(data_type,1)})")
node.query(f"INSERT INTO {table_name} SELECT tuple({to_data_type(data_type,1)},"
f"{to_data_type(data_type,1)}, {to_data_type(data_type,1)})")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
with Scenario(f"tupleElement with {data_type}"):
node.query(f"SELECT tupleElement(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), 1)")
@@ -298,9 +278,7 @@ def tuple_func(self, data_type, node=None):
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT tupleElement(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), 1)")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
with Scenario(f"untuple with {data_type}"):
node.query(f"SELECT untuple(({to_data_type(data_type,1)},))")
@@ -313,12 +291,11 @@ def tuple_func(self, data_type, node=None):
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT untuple(({to_data_type(data_type,1)},))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
with Scenario(f"tupleHammingDistance with {data_type}"):
node.query(f"SELECT tupleHammingDistance(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), ({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
node.query(f"SELECT tupleHammingDistance(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}),"
f"({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
with Scenario(f"tupleHammingDistance with {data_type} on a table"):
table_name = get_table_name()
@@ -326,11 +303,10 @@ def tuple_func(self, data_type, node=None):
table(name = table_name, data_type = data_type)
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT tupleHammingDistance(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), ({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
node.query(f"INSERT INTO {table_name} SELECT tupleHammingDistance(({to_data_type(data_type,1)},"
f"{to_data_type(data_type,1)}), ({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
@TestOutline(Suite)
@Requirements(
@@ -355,13 +331,17 @@ def map_func(self, data_type, node=None):
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)})")
execute_query(f"""
SELECT * FROM {table_name}
""")
execute_query(f"SELECT * FROM {table_name}")
with Scenario(f"mapAdd with {data_type}"):
node.query(f"SELECT mapAdd(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
exitcode = 44, message='Exception:')
sql = (f"SELECT mapAdd(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]),"
f"([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))")
if data_type.startswith("Decimal"):
node.query(sql, exitcode=43, message="Exception:")
else:
execute_query(sql)
with Scenario(f"mapAdd with {data_type} on a table"):
table_name = get_table_name()
@@ -369,16 +349,30 @@ def map_func(self, data_type, node=None):
table(name = table_name, data_type = f'Tuple(Array({data_type}), Array({data_type}))')
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT mapAdd(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
exitcode = 44, message='Exception:')
sql = (f"INSERT INTO {table_name} SELECT mapAdd(("
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]),"
f"([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))")
exitcode, message = 0, None
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
if data_type.startswith("Decimal"):
exitcode, message = 43, "Exception:"
node.query(sql, exitcode=exitcode, message=message)
execute_query(f"""SELECT * FROM {table_name} ORDER BY a ASC""")
with Scenario(f"mapSubtract with {data_type}"):
node.query(f"SELECT mapSubtract(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
exitcode = 44, message='Exception:')
sql = (f"SELECT mapSubtract(("
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]),"
f"([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))")
if data_type.startswith("Decimal"):
node.query(sql, exitcode=43, message="Exception:")
else:
execute_query(sql)
with Scenario(f"mapSubtract with {data_type} on a table"):
table_name = get_table_name()
@@ -386,15 +380,21 @@ def map_func(self, data_type, node=None):
table(name = table_name, data_type = f'Tuple(Array({data_type}), Array({data_type}))')
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT mapSubtract(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
exitcode = 44, message='Exception:')
sql = (f"INSERT INTO {table_name} SELECT mapSubtract(([{to_data_type(data_type,1)},"
f"{to_data_type(data_type,2)}], [{to_data_type(data_type,1)},"
f"{to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)},"
f"{to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))")
exitcode, message = 0, None
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
if data_type.startswith("Decimal"):
exitcode, message = 43, "Exception:"
node.query(sql, exitcode=exitcode, message=message)
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
with Scenario(f"mapPopulateSeries with {data_type}"):
node.query(f"SELECT mapPopulateSeries([1,2,3], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
node.query(f"SELECT mapPopulateSeries([1,2,3], [{to_data_type(data_type,1)},"
f"{to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
exitcode = 44, message='Exception:')
with Scenario(f"mapPopulateSeries with {data_type} on a table"):
@@ -403,15 +403,15 @@ def map_func(self, data_type, node=None):
table(name = table_name, data_type = f'Tuple(Array({data_type}), Array({data_type}))')
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT mapPopulateSeries([1,2,3], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
node.query(f"INSERT INTO {table_name} SELECT mapPopulateSeries([1,2,3],"
f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
exitcode = 44, message='Exception:')
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
with Scenario(f"mapContains with {data_type}"):
node.query(f"SELECT mapContains( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}), 'key1')")
node.query(f"SELECT mapContains( map('key1', {to_data_type(data_type,1)},"
f"'key2', {to_data_type(data_type,2)}), 'key1')")
with Scenario(f"mapContains with {data_type} on a table"):
table_name = get_table_name()
@@ -419,11 +419,10 @@ def map_func(self, data_type, node=None):
table(name = table_name, data_type = data_type)
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT mapContains( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}), 'key1')")
node.query(f"INSERT INTO {table_name} SELECT mapContains( map('key1', {to_data_type(data_type,1)},"
f"'key2', {to_data_type(data_type,2)}), 'key1')")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
with Scenario(f"mapKeys with {data_type}"):
node.query(f"SELECT mapKeys( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")
@@ -434,11 +433,10 @@ def map_func(self, data_type, node=None):
table(name = table_name, data_type = 'Array(String)')
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT mapKeys( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")
node.query(f"INSERT INTO {table_name} SELECT mapKeys( map('key1', {to_data_type(data_type,1)},"
f"'key2', {to_data_type(data_type,2)}))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
with Scenario(f"mapValues with {data_type}"):
node.query(f"SELECT mapValues( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")
@@ -449,11 +447,10 @@ def map_func(self, data_type, node=None):
table(name = table_name, data_type = f'Array({data_type})')
with When("I insert the output into a table"):
node.query(f"INSERT INTO {table_name} SELECT mapValues( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")
node.query(f"INSERT INTO {table_name} SELECT mapValues( map('key1', {to_data_type(data_type,1)},"
f"'key2', {to_data_type(data_type,2)}))")
execute_query(f"""
SELECT * FROM {table_name} ORDER BY a ASC
""")
execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
@TestFeature
@Name("array, tuple, map")
@@ -465,7 +462,8 @@ def map_func(self, data_type, node=None):
('Decimal256(0)',),
])
def feature(self, node="clickhouse1", stress=None, parallel=None):
"""Check that array, tuple, and map functions work with extended precision data types.
"""Check that array, tuple, and map functions work with
extended precision data types.
"""
self.context.node = self.context.cluster.node(node)
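
The refactor above repeats one pattern: long queries are reflowed across adjacent f-string literals, and mapAdd/mapSubtract on extended-precision integers are now expected to succeed, while Decimal arguments still fail — with exitcode 43 (ILLEGAL_TYPE_OF_ARGUMENT) rather than the previous blanket exitcode 44 (ILLEGAL_COLUMN). Condensed into one sketch, using the surrounding framework's names (node, execute_query, to_data_type) as assumptions:

def check_map_op(node, execute_query, to_data_type, data_type, func="mapAdd"):
    arr = f"[{to_data_type(data_type, 1)}, {to_data_type(data_type, 2)}]"
    sql = f"SELECT {func}(({arr}, {arr}), ({arr}, {arr}))"
    if data_type.startswith("Decimal"):
        # Decimal maps are still rejected: error code 43, ILLEGAL_TYPE_OF_ARGUMENT.
        node.query(sql, exitcode=43, message="Exception:")
    else:
        execute_query(sql)  # extended-precision integers now work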

View File

@@ -153,7 +153,10 @@ def add_config(config, timeout=300, restart=False, modify=False):
with node.cluster.shell(node.name) as bash:
bash.expect(bash.prompt)
bash.send("tail -n 0 -f /var/log/clickhouse-server/clickhouse-server.log")
bash.send("tail -v -n 0 -f /var/log/clickhouse-server/clickhouse-server.log")
# make sure tail process is launched and started to follow the file
bash.expect("<==")
bash.expect("\n")
with When("I add the config", description=config.path):
command = f"cat <<HEREDOC > {config.path}\n{config.content}\nHEREDOC"
@@ -170,7 +173,10 @@ def add_config(config, timeout=300, restart=False, modify=False):
with Finally(f"I remove {config.name}"):
with node.cluster.shell(node.name) as bash:
bash.expect(bash.prompt)
bash.send("tail -n 0 -f /var/log/clickhouse-server/clickhouse-server.log")
bash.send("tail -v -n 0 -f /var/log/clickhouse-server/clickhouse-server.log")
# make sure tail process is launched and started to follow the file
bash.expect("<==")
bash.expect("\n")
with By("removing the config file", description=config.path):
node.command(f"rm -rf {config.path}", exitcode=0)

View File

@@ -89,6 +89,13 @@ xfails = {
[(Fail, "LowCardinality(FixedString) as key not supported")],
"tests/table map with value string/LowCardinality(String) for key and value":
[(Fail, "LowCardinality(String) as key not supported")],
# JSON related
"tests/table map with duplicated keys/Map(Int64, String))":
[(Fail, "new bug due to JSON changes")],
"tests/table map with key integer/UInt64":
[(Fail, "new bug due to JSON changes")],
"tests/table map with value integer/UInt64":
[(Fail, "new bug due to JSON changes")]
}
xflags = {

View File

@@ -12,26 +12,6 @@ def getuid():
testname = f"{basename(current().name).replace(' ', '_').replace(',','')}"
return testname + "_" + str(uuid.uuid1()).replace('-', '_')
@TestStep(Given)
def allow_experimental_map_type(self):
"""Set allow_experimental_map_type = 1
"""
setting = ("allow_experimental_map_type", 1)
default_query_settings = None
try:
with By("adding allow_experimental_map_type to the default query settings"):
default_query_settings = getsattr(current().context, "default_query_settings", [])
default_query_settings.append(setting)
yield
finally:
with Finally("I remove allow_experimental_map_type from the default query settings"):
if default_query_settings:
try:
default_query_settings.pop(default_query_settings.index(setting))
except ValueError:
pass
@TestStep(Given)
def create_table(self, name, statement, on_cluster=False):
"""Create table.

View File

@@ -254,19 +254,19 @@ def table_map_select_key_with_value_string(self, type, data, output):
RQ_SRS_018_ClickHouse_Map_DataType_Value_Integer("1.0")
)
@Examples("type data output", [
("Map(Int8, Int8)", "('2020-01-01', map(1,127,2,0,3,-128))", '{"d":"2020-01-01","m":{1:127,2:0,3:-128}}', Name("Int8")),
("Map(Int8, UInt8)", "('2020-01-01', map(1,0,2,255))", '{"d":"2020-01-01","m":{1:0,2:255}}', Name("UInt8")),
("Map(Int8, Int16)", "('2020-01-01', map(1,127,2,0,3,-128))", '{"d":"2020-01-01","m":{1:32767,2:0,3:-32768}}', Name("Int16")),
("Map(Int8, UInt16)", "('2020-01-01', map(1,0,2,65535))", '{"d":"2020-01-01","m":{1:0,2:65535}}', Name("UInt16")),
("Map(Int8, Int32)", "('2020-01-01', map(1,127,2,0,3,-128))", '{"d":"2020-01-01","m":{1:2147483647,2:0,3:-2147483648}}', Name("Int32")),
("Map(Int8, UInt32)", "('2020-01-01', map(1,0,2,4294967295))", '{"d":"2020-01-01","m":{1:0,2:4294967295}}', Name("UInt32")),
("Map(Int8, Int8)", "('2020-01-01', map(1,127,2,0,3,-128))", '{"d":"2020-01-01","m":{"1":127,"2":0,"3":-128}}', Name("Int8")),
("Map(Int8, UInt8)", "('2020-01-01', map(1,0,2,255))", '{"d":"2020-01-01","m":{"1":0,"2":255}}', Name("UInt8")),
("Map(Int8, Int16)", "('2020-01-01', map(1,127,2,0,3,-128))", '{"d":"2020-01-01","m":{"1":32767,"2":0,"3":-32768}}', Name("Int16")),
("Map(Int8, UInt16)", "('2020-01-01', map(1,0,2,65535))", '{"d":"2020-01-01","m":{"1":0,"2":65535}}', Name("UInt16")),
("Map(Int8, Int32)", "('2020-01-01', map(1,127,2,0,3,-128))", '{"d":"2020-01-01","m":{"1":2147483647,"2":0,"3":-2147483648}}', Name("Int32")),
("Map(Int8, UInt32)", "('2020-01-01', map(1,0,2,4294967295))", '{"d":"2020-01-01","m":{"1":0,"2":4294967295}}', Name("UInt32")),
("Map(Int8, Int64)", "('2020-01-01', map(1,9223372036854775807,2,0,3,-9223372036854775808))", '{"d":"2020-01-01","m":{1:"9223372036854775807",2:"0",3:"-9223372036854775808"}}', Name("Int64")),
("Map(Int8, UInt64)", "('2020-01-01', map(1,0,2,18446744073709551615))", '{"d":"2020-01-01","m":{1:"0",2:"18446744073709551615"}}', Name("UInt64")),
("Map(Int8, Int128)", "('2020-01-01', map(1,170141183460469231731687303715884105727,2,0,3,-170141183460469231731687303715884105728))", '{"d":"2020-01-01","m":{1:"170141183460469231731687303715884105727",2:"0",3:"-170141183460469231731687303715884105728"}}', Name("Int128")),
("Map(Int8, Int256)", "('2020-01-01', map(1,57896044618658097711785492504343953926634992332820282019728792003956564819967,2,0,3,-57896044618658097711785492504343953926634992332820282019728792003956564819968))", '{"d":"2020-01-01","m":{1:"57896044618658097711785492504343953926634992332820282019728792003956564819967",2:"0",3:"-57896044618658097711785492504343953926634992332820282019728792003956564819968"}}', Name("Int256")),
("Map(Int8, UInt256)", "('2020-01-01', map(1,0,2,115792089237316195423570985008687907853269984665640564039457584007913129639935))", '{"d":"2020-01-01","m":{1:"0",2:"115792089237316195423570985008687907853269984665640564039457584007913129639935"}}', Name("UInt256")),
("Map(Int8, Nullable(Int8))", "('2020-01-01', map(1,toNullable(1)))", '{"d":"2020-01-01","m":{1:1}}', Name("toNullable")),
("Map(Int8, Nullable(Int8))", "('2020-01-01', map(1,toNullable(NULL)))", '{"d":"2020-01-01","m":{1:null}}', Name("toNullable(NULL)")),
("Map(Int8, Nullable(Int8))", "('2020-01-01', map(1,toNullable(1)))", '{"d":"2020-01-01","m":{"1":1}}', Name("toNullable")),
("Map(Int8, Nullable(Int8))", "('2020-01-01', map(1,toNullable(NULL)))", '{"d":"2020-01-01","m":{"1":null}}', Name("toNullable(NULL)")),
])
def table_map_with_value_integer(self, type, data, output):
"""Check what values we can insert into map type column with value integer.
@@ -281,8 +281,8 @@ def table_map_with_value_integer(self, type, data, output):
("Map(String, Array(Int8))", "('2020-01-01', map('key',[]))", '{"d":"2020-01-01","m":{"key":[]}}', Name("empty array")),
("Map(String, Array(Int8))", "('2020-01-01', map('key',[1,2,3]))", '{"d":"2020-01-01","m":{"key":[1,2,3]}}', Name("non-empty array of ints")),
("Map(String, Array(String))", "('2020-01-01', map('key',['1','2','3']))", '{"d":"2020-01-01","m":{"key":["1","2","3"]}}', Name("non-empty array of strings")),
("Map(String, Array(Map(Int8, Int8)))", "('2020-01-01', map('key',[map(1,2),map(2,3)]))", '{"d":"2020-01-01","m":{"key":[{1:2},{2:3}]}}', Name("non-empty array of maps")),
("Map(String, Array(Map(Int8, Array(Map(Int8, Array(Int8))))))", "('2020-01-01', map('key',[map(1,[map(1,[1])]),map(2,[map(2,[3])])]))", '{"d":"2020-01-01","m":{"key":[{1:[{1:[1]}]},{2:[{2:[3]}]}]}}', Name("non-empty array of maps of array of maps")),
("Map(String, Array(Map(Int8, Int8)))", "('2020-01-01', map('key',[map(1,2),map(2,3)]))", '{"d":"2020-01-01","m":{"key":[{"1":2},{"2":3}]}}', Name("non-empty array of maps")),
("Map(String, Array(Map(Int8, Array(Map(Int8, Array(Int8))))))", "('2020-01-01', map('key',[map(1,[map(1,[1])]),map(2,[map(2,[3])])]))", '{"d":"2020-01-01","m":{"key":[{"1":[{"1":[1]}]},{"2":[{"2":[3]}]}]}}', Name("non-empty array of maps of array of maps")),
])
def table_map_with_value_array(self, type, data, output):
"""Check what values we can insert into map type column with value Array.
@@ -294,12 +294,12 @@ def table_map_with_value_array(self, type, data, output):
RQ_SRS_018_ClickHouse_Map_DataType_Key_Integer("1.0")
)
@Examples("type data output", [
("Map(Int8, Int8)", "('2020-01-01', map(127,1,0,1,-128,1))", '{"d":"2020-01-01","m":{127:1,0:1,-128:1}}', Name("Int8")),
("Map(UInt8, Int8)", "('2020-01-01', map(0,1,255,1))", '{"d":"2020-01-01","m":{0:1,255:1}}', Name("UInt8")),
("Map(Int16, Int8)", "('2020-01-01', map(127,1,0,1,-128,1))", '{"d":"2020-01-01","m":{32767:1,0:1,-32768:1}}', Name("Int16")),
("Map(UInt16, Int8)", "('2020-01-01', map(0,1,65535,1))", '{"d":"2020-01-01","m":{0:1,65535:1}}', Name("UInt16")),
("Map(Int32, Int8)", "('2020-01-01', map(2147483647,1,0,1,-2147483648,1))", '{"d":"2020-01-01","m":{2147483647:1,0:1,-2147483648:1}}', Name("Int32")),
("Map(UInt32, Int8)", "('2020-01-01', map(0,1,4294967295,1))", '{"d":"2020-01-01","m":{0:1,4294967295:1}}', Name("UInt32")),
("Map(Int8, Int8)", "('2020-01-01', map(127,1,0,1,-128,1))", '{"d":"2020-01-01","m":{"127":1,"0":1,"-128":1}}', Name("Int8")),
("Map(UInt8, Int8)", "('2020-01-01', map(0,1,255,1))", '{"d":"2020-01-01","m":{"0":1,"255":1}}', Name("UInt8")),
("Map(Int16, Int8)", "('2020-01-01', map(127,1,0,1,-128,1))", '{"d":"2020-01-01","m":{"32767":1,"0":1,"-32768":1}}', Name("Int16")),
("Map(UInt16, Int8)", "('2020-01-01', map(0,1,65535,1))", '{"d":"2020-01-01","m":{"0":1,"65535":1}}', Name("UInt16")),
("Map(Int32, Int8)", "('2020-01-01', map(2147483647,1,0,1,-2147483648,1))", '{"d":"2020-01-01","m":{"2147483647":1,"0":1,"-2147483648":1}}', Name("Int32")),
("Map(UInt32, Int8)", "('2020-01-01', map(0,1,4294967295,1))", '{"d":"2020-01-01","m":{"0":1,"4294967295":1}}', Name("UInt32")),
("Map(Int64, Int8)", "('2020-01-01', map(9223372036854775807,1,0,1,-9223372036854775808,1))", '{"d":"2020-01-01","m":{"9223372036854775807":1,"0":1,"-9223372036854775808":1}}', Name("Int64")),
("Map(UInt64, Int8)", "('2020-01-01', map(0,1,18446744073709551615,1))", '{"d":"2020-01-01","m":{"0":1,"18446744073709551615":1}}', Name("UInt64")),
("Map(Int128, Int8)", "('2020-01-01', map(170141183460469231731687303715884105727,1,0,1,-170141183460469231731687303715884105728,1))", '{"d":"2020-01-01","m":{170141183460469231731687303715884105727:1,0:1,"-170141183460469231731687303715884105728":1}}', Name("Int128")),
@@ -716,7 +716,7 @@ def cast_tuple_of_two_arrays_to_map(self, tuple, type, exitcode, message):
)
@Examples("tuple type exitcode message check_insert", [
("(([1, 2, 3], ['Ready', 'Steady', 'Go']))", "Map(UInt8, String)",
0, '{"m":{1:"Ready",2:"Steady",3:"Go"}}', False, Name("int -> int")),
0, '{"m":{"1":"Ready","2":"Steady","3":"Go"}}', False, Name("int -> int")),
("(([1, 2, 3], ['Ready', 'Steady', 'Go']))", "Map(String, String)",
0, '{"m":{"1":"Ready","2":"Steady","3":"Go"}}', False, Name("int -> string")),
("((['1', '2', '3'], ['Ready', 'Steady', 'Go']))", "Map(UInt8, String)",
@@ -728,7 +728,7 @@ def cast_tuple_of_two_arrays_to_map(self, tuple, type, exitcode, message):
("(([[1]],['hello']))", "Map(String, String)",
53, 'DB::Exception: Type mismatch in IN or VALUES section', True, Name("array -> string")),
("(([(1,2),(3,4)]))", "Map(UInt8, UInt8)",
0, '{"m":{1:2,3:4}}', False, Name("array of two tuples")),
0, '{"m":{"1":2,"3":4}}', False, Name("array of two tuples")),
("(([1, 2], ['Ready', 'Steady', 'Go']))", "Map(UInt8, String)",
53, "DB::Exception: CAST AS Map can only be performed from tuple of arrays with equal sizes", True,
Name("unequal array sizes")),
@@ -767,7 +767,7 @@ def cast_array_of_two_tuples_to_map(self, tuple, type, exitcode, message):
RQ_SRS_018_ClickHouse_Map_DataType_Conversion_From_ArrayOfTuplesToMap_Invalid("1.0")
)
@Examples("tuple type exitcode message check_insert", [
("(([(1,2),(3,4)]))", "Map(UInt8, UInt8)", 0, '{"m":{1:2,3:4}}', False,
("(([(1,2),(3,4)]))", "Map(UInt8, UInt8)", 0, '{"m":{"1":2,"3":4}}', False,
Name("array of two tuples")),
("(([(1,2),(3)]))", "Map(UInt8, UInt8)", 130,
"DB::Exception: There is no supertype for types Tuple(UInt8, UInt8), UInt8 because some of them are Tuple and some of them are not", True,
@@ -1188,8 +1188,5 @@ def performance(self, len=10, rows=6000000):
def feature(self, node="clickhouse1"):
self.context.node = self.context.cluster.node(node)
with Given("I allow experimental map type"):
allow_experimental_map_type()
for scenario in loads(current_module(), Scenario):
scenario()
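
Two things change in this file: the call to the removed allow_experimental_map_type step is dropped from feature(), and the expected JSONEachRow outputs now quote integer map keys. The quoting follows from JSON itself — object keys must be strings, so {1:127} is not valid JSON and serializers coerce the key. Python's json module shows the same rule:

import json

print(json.dumps({1: 127, 2: 0, 3: -128}))
# {"1": 127, "2": 0, "3": -128}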

View File

@@ -2103,7 +2103,7 @@ def insert_on_source_table(self, grant_target_name, user_name, node=None):
with When("I grant INSERT on the source table"):
node.query(f"GRANT INSERT ON {table1_name} TO {grant_target_name}")
with Then("I attempt to insert into the source table"):
node.query(f"INSERT INTO {table1_name}(d) VALUES ('01-01-2020')", settings = [("user",f"{user_name}")])
node.query(f"INSERT INTO {table1_name}(d) VALUES ('2020-01-01')", settings = [("user",f"{user_name}")])
finally:
with Finally("I drop the view"):
@@ -2152,7 +2152,7 @@ def insert_with_insert_privilege(self, grant_target_name, user_name, node=None):
with When("I grant INSERT on the view"):
node.query(f"GRANT INSERT ON {view_name} TO {grant_target_name}")
with Then("I attempt to insert into the view"):
node.query(f"INSERT INTO {view_name}(d) VALUES ('01-01-2020')",
node.query(f"INSERT INTO {view_name}(d) VALUES ('2020-01-01')",
settings = [("user",f"{user_name}")])
finally:
@@ -2201,7 +2201,7 @@ def insert_on_target_table(self, grant_target_name, user_name, node=None):
with When("I grant INSERT on the target table"):
node.query(f"GRANT INSERT ON {table0_name} TO {grant_target_name}")
with Then("I attempt to insert into the target table"):
node.query(f"INSERT INTO {table0_name}(d) VALUES ('01-01-2020')", settings = [("user",f"{user_name}")])
node.query(f"INSERT INTO {table0_name}(d) VALUES ('2020-01-01')", settings = [("user",f"{user_name}")])
finally:
with Finally("I drop the view"):
@@ -2248,7 +2248,7 @@ def insert_on_target_table(self, grant_target_name, user_name, node=None):
with When("I grant INSERT on the target table"):
node.query(f"GRANT INSERT ON {implicit_table_name} TO {grant_target_name}")
with Then("I attempt to insert into the target table"):
node.query(f"INSERT INTO {implicit_table_name}(d) VALUES ('01-01-2020')", settings = [("user",f"{user_name}")])
node.query(f"INSERT INTO {implicit_table_name}(d) VALUES ('2020-01-01')", settings = [("user",f"{user_name}")])
finally:
with Finally("I drop the view"):

View File

@@ -23,14 +23,14 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
with Pool(8) as pool:
try:
run_scenario(pool, tasks, Feature(test=load("example.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("map_type.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("window_functions.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("datetime64_extended_range.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("map_type.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("window_functions.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("datetime64_extended_range.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("kerberos.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("extended_precision_data_types.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("extended_precision_data_types.regression", "regression")), args)
finally:
join(tasks)
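
With this change every TestFlows module except kerberos is scheduled into the shared pool of eight workers: run_scenario submits each regression as a task, and join waits for all of them in the finally block. A rough plain-Python analogue of that scheduling pattern (concurrent.futures stands in for TestFlows' Pool, tasks, and join):

from concurrent.futures import ThreadPoolExecutor, wait

def run_all(regressions, workers=8):
    tasks = []
    with ThreadPoolExecutor(max_workers=workers) as pool:
        try:
            for regression in regressions:
                tasks.append(pool.submit(regression))  # like run_scenario(pool, tasks, ...)
        finally:
            wait(tasks)  # like join(tasks)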

View File

@@ -41,6 +41,8 @@ xfails = {
[(Fail, "not supported, https://github.com/ClickHouse/ClickHouse/issues/19857")],
"tests/:/misc/window functions in subquery":
[(Fail, "not supported, https://github.com/ClickHouse/ClickHouse/issues/19857")],
"tests/:/misc/in view":
[(Fail, "bug, https://github.com/ClickHouse/ClickHouse/issues/26001")],
"tests/:/frame clause/range frame/order by decimal":
[(Fail, "Exception: The RANGE OFFSET frame for 'DB::ColumnDecimal<DB::Decimal<long> >' ORDER BY column is not implemented")],
"tests/:/frame clause/range frame/with nulls":

View File

@@ -374,6 +374,10 @@ def create_table(self, name, statement, on_cluster=False):
node = current().context.node
try:
with Given(f"I have a {name} table"):
if on_cluster:
node.query(f"DROP TABLE IF EXISTS {name} ON CLUSTER {on_cluster}")
else:
node.query(f"DROP TABLE IF EXISTS {name}")
node.query(statement.format(name=name))
yield name
finally:
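
create_table is now idempotent: any leftover table from a previous failed run is dropped first, cluster-wide when on_cluster names a cluster, before the statement is executed. The shape of the updated step outside the TestFlows decorators (the Finally cleanup is truncated in the diff and elided here):

def create_table(node, name, statement, on_cluster=False):
    # Drop leftovers so a failed earlier run cannot poison this one.
    if on_cluster:
        node.query(f"DROP TABLE IF EXISTS {name} ON CLUSTER {on_cluster}")
    else:
        node.query(f"DROP TABLE IF EXISTS {name}")
    node.query(statement.format(name=name))
    return name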